1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 __doc__ = """
24     Defines the generic numerical objects.
25 """
26 __author__ = "Jean-Philippe ARGAUD"
27
28 import os, time, copy, types, sys, logging
29 import math, numpy, scipy, scipy.optimize, scipy.version
30 from daCore.BasicObjects import Operator
31 from daCore.PlatformInfo import PlatformInfo
32 mpr = PlatformInfo().MachinePrecision()
33 mfp = PlatformInfo().MaximumPrecision()
34 # logging.getLogger().setLevel(logging.DEBUG)
35
36 # ==============================================================================
37 def ExecuteFunction( triplet ):
38     assert len(triplet) == 3, "Incorrect number of arguments"
39     X, xArgs, funcrepr = triplet
40     __X = numpy.ravel( X ).reshape((-1,1))
41     __sys_path_tmp = list(sys.path) ; sys.path.insert(0,funcrepr["__userFunction__path"])
42     __module = __import__(funcrepr["__userFunction__modl"], globals(), locals(), [])
43     __fonction = getattr(__module,funcrepr["__userFunction__name"])
44     sys.path = __sys_path_tmp ; del __sys_path_tmp
45     if isinstance(xArgs, dict):
46         __HX  = __fonction( __X, **xArgs )
47     else:
48         __HX  = __fonction( __X )
49     return numpy.ravel( __HX )
50
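# Editorial sketch (illustrative, not part of the original API): shape of the
# (X, xArgs, funcrepr) triplet expected by ExecuteFunction, as it is built later
# in FDApproximation.TangentMatrix for the multiprocessing jobs. The path and
# module names below are hypothetical placeholders.
#
#   funcrepr = {
#       "__userFunction__path" : "/some/user/directory",   # directory prepended to sys.path
#       "__userFunction__modl" : "my_physics_module",      # module name, without extension
#       "__userFunction__name" : "DirectOperator",         # function name inside that module
#   }
#   triplet = ( numpy.arange(3.), None, funcrepr )         # (X, xArgs, funcrepr)
#   HX = ExecuteFunction( triplet )                        # returns numpy.ravel( H(X) )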
51 # ==============================================================================
52 class FDApproximation(object):
53     """
54     This class serves as an interface to define approximated operators. When
55     creating an object by providing a function "Function", one gets an object
56     with the 3 methods "DirectOperator", "TangentOperator" and
57     "AdjointOperator". The FD approximation is controlled by the multiplicative
58     increment "increment" (1% by default), or by the fixed increment "dX" which
59     is then multiplied by "increment" (hence in %), and centered finite
60     differences are used when the boolean "centeredDF" is true.
61     """
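    # Editorial sketch (illustrative only, names are arbitrary examples): minimal
    # use of this class with a user supplied function "H".
    #
    #   def H( x ): return numpy.ravel( x )**2             # any R^n -> R^p function
    #   FDA = FDApproximation( Function = H, centeredDF = True, increment = 0.01 )
    #   X0  = numpy.array([1., 2., 3.])
    #   HX  = FDA.DirectOperator( X0 )                     # H(X0)
    #   J   = FDA.TangentMatrix( X0 )                      # finite-difference Jacobian at X0
    #   JtY = FDA.AdjointOperator( (X0, HX) )              # J(X0)^T applied to the vector HX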
62     def __init__(self,
63             name                  = "FDApproximation",
64             Function              = None,
65             centeredDF            = False,
66             increment             = 0.01,
67             dX                    = None,
68             extraArguments        = None,
69             reducingMemoryUse     = False,
70             avoidingRedundancy    = True,
71             toleranceInRedundancy = 1.e-18,
72             lenghtOfRedundancy    = -1,
73             mpEnabled             = False,
74             mpWorkers             = None,
75             mfEnabled             = False,
76             ):
77         self.__name = str(name)
78         self.__extraArgs = extraArguments
79         #
80         if mpEnabled:
81             try:
82                 import multiprocessing
83                 self.__mpEnabled = True
84             except ImportError:
85                 self.__mpEnabled = False
86         else:
87             self.__mpEnabled = False
88         self.__mpWorkers = mpWorkers
89         if self.__mpWorkers is not None and self.__mpWorkers < 1:
90             self.__mpWorkers = None
91         logging.debug("FDA Calculs en multiprocessing : %s (nombre de processus : %s)"%(self.__mpEnabled,self.__mpWorkers))
92         #
93         self.__mfEnabled = bool(mfEnabled)
94         logging.debug("FDA Calculs en multifonctions : %s"%(self.__mfEnabled,))
95         #
96         self.__rmEnabled = bool(reducingMemoryUse)
97         logging.debug("FDA Calculs avec réduction mémoire : %s"%(self.__rmEnabled,))
98         #
99         if avoidingRedundancy:
100             self.__avoidRC = True
101             self.__tolerBP = float(toleranceInRedundancy)
102             self.__lenghtRJ = int(lenghtOfRedundancy)
103             self.__listJPCP = [] # Jacobian Previous Calculated Points
104             self.__listJPCI = [] # Jacobian Previous Calculated Increment
105             self.__listJPCR = [] # Jacobian Previous Calculated Results
106             self.__listJPPN = [] # Jacobian Previous Calculated Point Norms
107             self.__listJPIN = [] # Jacobian Previous Calculated Increment Norms
108         else:
109             self.__avoidRC = False
110         logging.debug("FDA Calculs avec réduction des doublons : %s"%self.__avoidRC)
111         if self.__avoidRC:
112             logging.debug("FDA Tolérance de détermination des doublons : %.2e"%self.__tolerBP)
113         #
114         if self.__mpEnabled:
115             if isinstance(Function,types.FunctionType):
116                 logging.debug("FDA Calculs en multiprocessing : FunctionType")
117                 self.__userFunction__name = Function.__name__
118                 try:
119                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
120                 except Exception:
121                     mod = os.path.abspath(Function.__globals__['__file__'])
122                 if not os.path.isfile(mod):
123                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
124                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
125                 self.__userFunction__path = os.path.dirname(mod)
126                 del mod
127                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
128                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
129             elif isinstance(Function,types.MethodType):
130                 logging.debug("FDA Calculs en multiprocessing : MethodType")
131                 self.__userFunction__name = Function.__name__
132                 try:
133                     mod = os.path.join(Function.__func__.__globals__['filepath'],Function.__func__.__globals__['filename'])
134                 except Exception:
135                     mod = os.path.abspath(Function.__func__.__globals__['__file__'])
136                 if not os.path.isfile(mod):
137                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
138                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
139                 self.__userFunction__path = os.path.dirname(mod)
140                 del mod
141                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
142                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
143             else:
144                 raise TypeError("User defined function or method has to be provided for finite differences approximation.")
145         else:
146             self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
147             self.__userFunction = self.__userOperator.appliedTo
148         #
149         self.__centeredDF = bool(centeredDF)
150         if abs(float(increment)) > 1.e-15:
151             self.__increment  = float(increment)
152         else:
153             self.__increment  = 0.01
154         if dX is None:
155             self.__dX     = None
156         else:
157             self.__dX     = numpy.ravel( dX )
158
159     # ---------------------------------------------------------
160     def __doublon__(self, e, l, n, v=None):
161         __ac, __iac = False, -1
162         for i in range(len(l)-1,-1,-1):
163             if numpy.linalg.norm(e - l[i]) < self.__tolerBP * n[i]:
164                 __ac, __iac = True, i
165                 if v is not None: logging.debug("FDA Cas%s déja calculé, récupération du doublon %i"%(v,__iac))
166                 break
167         return __ac, __iac
168
169     # ---------------------------------------------------------
170     def __listdotwith__(self, __LMatrix, __dotWith = None, __dotTWith = None):
171         "Incremental product of a columnwise list matrix with a vector"
172         if not isinstance(__LMatrix, (list,tuple)):
173             raise TypeError("Columnwise list matrix has not the proper type: %s"%type(__LMatrix))
174         if __dotWith is not None:
175             __Idwx = numpy.ravel( __dotWith )
176             assert len(__LMatrix) == __Idwx.size, "Incorrect size of elements"
177             __Produit = numpy.zeros(__LMatrix[0].size)
178             for i, col in enumerate(__LMatrix):
179                 __Produit += float(__Idwx[i]) * col
180             return __Produit
181         elif __dotTWith is not None:
182             _Idwy = numpy.ravel( __dotTWith ).T
183             assert __LMatrix[0].size == _Idwy.size, "Incorrect size of elements"
184             __Produit = numpy.zeros(len(__LMatrix))
185             for i, col in enumerate(__LMatrix):
186                 __Produit[i] = float( _Idwy @ col)
187             return __Produit
188         else:
189             __Produit = None
190         return __Produit
191
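    # Editorial sketch (illustrative only): __listdotwith__ treats the list of
    # columns [c_0, ..., c_{n-1}] as the matrix M = [c_0 | ... | c_{n-1}] and
    # computes either M @ v (__dotWith) or M.T @ w (__dotTWith) without ever
    # assembling M. The equivalent dense computation, for reference, would be:
    #
    #   M = numpy.stack( __LMatrix, axis=1 )               # columns side by side
    #   M @ numpy.ravel( __dotWith )                       # same as the __dotWith branch
    #   M.T @ numpy.ravel( __dotTWith )                    # same as the __dotTWith branch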
192     # ---------------------------------------------------------
193     def DirectOperator(self, X, **extraArgs ):
194         """
195         Computation of the direct operator using the provided function.
196
197         NB: the extraArgs are present to ensure call compatibility, but they
198         must not be passed here to the user function.
199         """
200         logging.debug("FDA Calcul DirectOperator (explicite)")
201         if self.__mfEnabled:
202             _HX = self.__userFunction( X, argsAsSerie = True )
203         else:
204             _HX = numpy.ravel(self.__userFunction( numpy.ravel(X) ))
205         #
206         return _HX
207
208     # ---------------------------------------------------------
209     def TangentMatrix(self, X, dotWith = None, dotTWith = None ):
210         """
211         Computation of the tangent operator as the Jacobian by finite
212         differences, that is the gradient of H at X, using directional finite
213         differences around the point X. X is a numpy.ndarray.
214
215         Centered finite differences (2nd order approximation):
216         1/ For each component i of X, the perturbation dX[i] is added to and
217            subtracted from the component X[i], building X_plus_dXi and
218            X_moins_dXi, and the responses HX_plus_dXi = H( X_plus_dXi ) and
219            HX_moins_dXi = H( X_moins_dXi ) are computed
220         2/ The differences (HX_plus_dXi-HX_moins_dXi) are computed and divided
221            by the step 2*dXi
222         3/ Each result, component by component, becomes a column of the Jacobian
223
224         Non-centered finite differences (1st order approximation):
225         1/ For each component i of X, the perturbation dX[i] is added to the
226            component X[i] to build X_plus_dXi, and the response
227            HX_plus_dXi = H( X_plus_dXi ) is computed
228         2/ The central value HX = H(X) is computed
229         3/ The differences (HX_plus_dXi-HX) are computed and divided by
230            the step dXi
231         4/ Each result, component by component, becomes a column of the Jacobian
232         (A standalone sketch of the centered scheme is given after this method.)
233         """
234         logging.debug("FDA Début du calcul de la Jacobienne")
235         logging.debug("FDA   Incrément de............: %s*X"%float(self.__increment))
236         logging.debug("FDA   Approximation centrée...: %s"%(self.__centeredDF))
237         #
238         if X is None or len(X)==0:
239             raise ValueError("Nominal point X for approximate derivatives can not be None or void (given X: %s)."%(str(X),))
240         #
241         _X = numpy.ravel( X )
242         #
243         if self.__dX is None:
244             _dX  = self.__increment * _X
245         else:
246             _dX = numpy.ravel( self.__dX )
247         assert len(_X) == len(_dX), "Inconsistent dX increment length with respect to the X one"
248         assert _X.size == _dX.size, "Inconsistent dX increment size with respect to the X one"
249         #
250         if (_dX == 0.).any():
251             moyenne = _dX.mean()
252             if moyenne == 0.:
253                 _dX = numpy.where( _dX == 0., float(self.__increment), _dX )
254             else:
255                 _dX = numpy.where( _dX == 0., moyenne, _dX )
256         #
257         __alreadyCalculated  = False
258         if self.__avoidRC:
259             __bidon, __alreadyCalculatedP = self.__doublon__(_X,  self.__listJPCP, self.__listJPPN, None)
260             __bidon, __alreadyCalculatedI = self.__doublon__(_dX, self.__listJPCI, self.__listJPIN, None)
261             if __alreadyCalculatedP == __alreadyCalculatedI > -1:
262                 __alreadyCalculated, __i = True, __alreadyCalculatedP
263                 logging.debug("FDA Cas J déjà calculé, récupération du doublon %i"%__i)
264         #
265         if __alreadyCalculated:
266             logging.debug("FDA   Calcul Jacobienne (par récupération du doublon %i)"%__i)
267             _Jacobienne = self.__listJPCR[__i]
268             logging.debug("FDA Fin du calcul de la Jacobienne")
269             if dotWith is not None:
270                 return numpy.dot(_Jacobienne,   numpy.ravel( dotWith ))
271             elif dotTWith is not None:
272                 return numpy.dot(_Jacobienne.T, numpy.ravel( dotTWith ))
273         else:
274             logging.debug("FDA   Calcul Jacobienne (explicite)")
275             if self.__centeredDF:
276                 #
277                 if self.__mpEnabled and not self.__mfEnabled:
278                     funcrepr = {
279                         "__userFunction__path" : self.__userFunction__path,
280                         "__userFunction__modl" : self.__userFunction__modl,
281                         "__userFunction__name" : self.__userFunction__name,
282                     }
283                     _jobs = []
284                     for i in range( len(_dX) ):
285                         _dXi            = _dX[i]
286                         _X_plus_dXi     = numpy.array( _X, dtype=float )
287                         _X_plus_dXi[i]  = _X[i] + _dXi
288                         _X_moins_dXi    = numpy.array( _X, dtype=float )
289                         _X_moins_dXi[i] = _X[i] - _dXi
290                         #
291                         _jobs.append( (_X_plus_dXi,  self.__extraArgs, funcrepr) )
292                         _jobs.append( (_X_moins_dXi, self.__extraArgs, funcrepr) )
293                     #
294                     import multiprocessing
295                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
296                     _HX_plusmoins_dX = self.__pool.map( ExecuteFunction, _jobs )
297                     self.__pool.close()
298                     self.__pool.join()
299                     #
300                     _Jacobienne  = []
301                     for i in range( len(_dX) ):
302                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
303                     #
304                 elif self.__mfEnabled:
305                     _xserie = []
306                     for i in range( len(_dX) ):
307                         _dXi            = _dX[i]
308                         _X_plus_dXi     = numpy.array( _X, dtype=float )
309                         _X_plus_dXi[i]  = _X[i] + _dXi
310                         _X_moins_dXi    = numpy.array( _X, dtype=float )
311                         _X_moins_dXi[i] = _X[i] - _dXi
312                         #
313                         _xserie.append( _X_plus_dXi )
314                         _xserie.append( _X_moins_dXi )
315                     #
316                     _HX_plusmoins_dX = self.DirectOperator( _xserie )
317                     #
318                     _Jacobienne  = []
319                     for i in range( len(_dX) ):
320                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
321                     #
322                 else:
323                     _Jacobienne  = []
324                     for i in range( _dX.size ):
325                         _dXi            = _dX[i]
326                         _X_plus_dXi     = numpy.array( _X, dtype=float )
327                         _X_plus_dXi[i]  = _X[i] + _dXi
328                         _X_moins_dXi    = numpy.array( _X, dtype=float )
329                         _X_moins_dXi[i] = _X[i] - _dXi
330                         #
331                         _HX_plus_dXi    = self.DirectOperator( _X_plus_dXi )
332                         _HX_moins_dXi   = self.DirectOperator( _X_moins_dXi )
333                         #
334                         _Jacobienne.append( numpy.ravel( _HX_plus_dXi - _HX_moins_dXi ) / (2.*_dXi) )
335                 #
336             else:
337                 #
338                 if self.__mpEnabled and not self.__mfEnabled:
339                     funcrepr = {
340                         "__userFunction__path" : self.__userFunction__path,
341                         "__userFunction__modl" : self.__userFunction__modl,
342                         "__userFunction__name" : self.__userFunction__name,
343                     }
344                     _jobs = []
345                     _jobs.append( (_X, self.__extraArgs, funcrepr) )
346                     for i in range( len(_dX) ):
347                         _X_plus_dXi    = numpy.array( _X, dtype=float )
348                         _X_plus_dXi[i] = _X[i] + _dX[i]
349                         #
350                         _jobs.append( (_X_plus_dXi, self.__extraArgs, funcrepr) )
351                     #
352                     import multiprocessing
353                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
354                     _HX_plus_dX = self.__pool.map( ExecuteFunction, _jobs )
355                     self.__pool.close()
356                     self.__pool.join()
357                     #
358                     _HX = _HX_plus_dX.pop(0)
359                     #
360                     _Jacobienne = []
361                     for i in range( len(_dX) ):
362                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
363                     #
364                 elif self.__mfEnabled:
365                     _xserie = []
366                     _xserie.append( _X )
367                     for i in range( len(_dX) ):
368                         _X_plus_dXi    = numpy.array( _X, dtype=float )
369                         _X_plus_dXi[i] = _X[i] + _dX[i]
370                         #
371                         _xserie.append( _X_plus_dXi )
372                     #
373                     _HX_plus_dX = self.DirectOperator( _xserie )
374                     #
375                     _HX = _HX_plus_dX.pop(0)
376                     #
377                     _Jacobienne = []
378                     for i in range( len(_dX) ):
379                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
380                     #
381                 else:
382                     _Jacobienne  = []
383                     _HX = self.DirectOperator( _X )
384                     for i in range( _dX.size ):
385                         _dXi            = _dX[i]
386                         _X_plus_dXi     = numpy.array( _X, dtype=float )
387                         _X_plus_dXi[i]  = _X[i] + _dXi
388                         #
389                         _HX_plus_dXi = self.DirectOperator( _X_plus_dXi )
390                         #
391                         _Jacobienne.append( numpy.ravel(( _HX_plus_dXi - _HX ) / _dXi) )
392             #
393             if (dotWith is not None) or (dotTWith is not None):
394                 __Produit = self.__listdotwith__(_Jacobienne, dotWith, dotTWith)
395             else:
396                 __Produit = None
397             if __Produit is None or self.__avoidRC:
398                 _Jacobienne = numpy.transpose( numpy.vstack( _Jacobienne ) )
399                 if self.__avoidRC:
400                     if self.__lenghtRJ < 0: self.__lenghtRJ = 2 * _X.size
401                     while len(self.__listJPCP) > self.__lenghtRJ:
402                         self.__listJPCP.pop(0)
403                         self.__listJPCI.pop(0)
404                         self.__listJPCR.pop(0)
405                         self.__listJPPN.pop(0)
406                         self.__listJPIN.pop(0)
407                     self.__listJPCP.append( copy.copy(_X) )
408                     self.__listJPCI.append( copy.copy(_dX) )
409                     self.__listJPCR.append( copy.copy(_Jacobienne) )
410                     self.__listJPPN.append( numpy.linalg.norm(_X) )
411                     self.__listJPIN.append( numpy.linalg.norm(_dX) )
412             logging.debug("FDA Fin du calcul de la Jacobienne")
413             if __Produit is not None:
414                 return __Produit
415         #
416         return _Jacobienne
417
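    # Editorial sketch (standalone and illustrative, names are arbitrary): one
    # column i of the Jacobian by the centered scheme described in the
    # TangentMatrix docstring, for a generic function H and a step dXi.
    #
    #   def jacobian_column_centered( H, X, i, dXi ):
    #       Xp = numpy.array( X, dtype=float ) ; Xp[i] += dXi
    #       Xm = numpy.array( X, dtype=float ) ; Xm[i] -= dXi
    #       return ( numpy.ravel(H(Xp)) - numpy.ravel(H(Xm)) ) / (2. * dXi)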
418     # ---------------------------------------------------------
419     def TangentOperator(self, paire, **extraArgs ):
420         """
421         Computation of the tangent operator using the Jacobian.
422
423         NB: the extraArgs are present to ensure call compatibility, but they
424         must not be passed here to the user function.
425         """
426         if self.__mfEnabled:
427             assert len(paire) == 1, "Incorrect length of arguments"
428             _paire = paire[0]
429             assert len(_paire) == 2, "Incorrect number of arguments"
430         else:
431             assert len(paire) == 2, "Incorrect number of arguments"
432             _paire = paire
433         X, dX = _paire
434         if dX is None or len(dX) == 0:
435             #
436             # Computation of the matrix form when the second argument is None
437             # -------------------------------------------------------------
438             _Jacobienne = self.TangentMatrix( X )
439             if self.__mfEnabled: return [_Jacobienne,]
440             else:                return _Jacobienne
441         else:
442             #
443             # Computation of the linearised value of H at X applied to dX
444             # ------------------------------------------------------
445             _HtX = self.TangentMatrix( X, dotWith = dX )
446             if self.__mfEnabled: return [_HtX,]
447             else:                return _HtX
448
449     # ---------------------------------------------------------
450     def AdjointOperator(self, paire, **extraArgs ):
451         """
452         Computation of the adjoint operator using the Jacobian.
453
454         NB: the extraArgs are present to ensure call compatibility, but they
455         must not be passed here to the user function.
456         """
457         if self.__mfEnabled:
458             assert len(paire) == 1, "Incorrect length of arguments"
459             _paire = paire[0]
460             assert len(_paire) == 2, "Incorrect number of arguments"
461         else:
462             assert len(paire) == 2, "Incorrect number of arguments"
463             _paire = paire
464         X, Y = _paire
465         if Y is None or len(Y) == 0:
466             #
467             # Computation of the matrix form when the second argument is None
468             # -------------------------------------------------------------
469             _JacobienneT = self.TangentMatrix( X ).T
470             if self.__mfEnabled: return [_JacobienneT,]
471             else:                return _JacobienneT
472         else:
473             #
474             # Computation of the adjoint value at X applied to Y
475             # --------------------------------------------------
476             _HaY = self.TangentMatrix( X, dotTWith = Y )
477             if self.__mfEnabled: return [_HaY,]
478             else:                return _HaY
479
480 # ==============================================================================
481 def EnsembleOfCenteredPerturbations( _bgcenter, _bgcovariance, _nbmembers ):
482     "Generation of an ensemble of _nbmembers centered random states"
483     #
484     _bgcenter = numpy.ravel(_bgcenter)[:,None]
485     if _nbmembers < 1:
486         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
487     #
488     if _bgcovariance is None:
489         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
490     else:
491         _Z = numpy.random.multivariate_normal(numpy.zeros(_bgcenter.size), _bgcovariance, size=_nbmembers).T
492         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers) + _Z
493     #
494     return BackgroundEnsemble
495
496 # ==============================================================================
497 def EnsembleOfBackgroundPerturbations( _bgcenter, _bgcovariance, _nbmembers, _withSVD = True):
498     "Generation of an ensemble of _nbmembers centered random states"
499     def __CenteredRandomAnomalies(Zr, N):
500         """
501         Generates a matrix of N random anomalies centered on Zr, following the
502         handwritten notes of MB and consistent with the code of PS, with eps = -1
503         """
504         eps = -1
505         Q = numpy.identity(N-1)-numpy.ones((N-1,N-1))/numpy.sqrt(N)/(numpy.sqrt(N)-eps)
506         Q = numpy.concatenate((Q, [eps*numpy.ones(N-1)/numpy.sqrt(N)]), axis=0)
507         R, _ = numpy.linalg.qr(numpy.random.normal(size = (N-1,N-1)))
508         Q = numpy.dot(Q,R)
509         Zr = numpy.dot(Q,Zr)
510         return Zr.T
511     #
512     _bgcenter = numpy.ravel(_bgcenter).reshape((-1,1))
513     if _nbmembers < 1:
514         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
515     if _bgcovariance is None:
516         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
517     else:
518         if _withSVD:
519             U, s, V = numpy.linalg.svd(_bgcovariance, full_matrices=False)
520             _nbctl = _bgcenter.size
521             if _nbmembers > _nbctl:
522                 _Z = numpy.concatenate((numpy.dot(
523                     numpy.diag(numpy.sqrt(s[:_nbctl])), V[:_nbctl]),
524                     numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1-_nbctl)), axis = 0)
525             else:
526                 _Z = numpy.dot(numpy.diag(numpy.sqrt(s[:_nbmembers-1])), V[:_nbmembers-1])
527             _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
528             BackgroundEnsemble = _bgcenter + _Zca
529         else:
530             if max(abs(_bgcovariance.flatten())) > 0:
531                 _nbctl = _bgcenter.size
532                 _Z = numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1)
533                 _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
534                 BackgroundEnsemble = _bgcenter + _Zca
535             else:
536                 BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
537     #
538     return BackgroundEnsemble
539
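# Editorial note (illustrative check, values are arbitrary): the anomalies built
# by __CenteredRandomAnomalies sum to (numerically) zero over the members, so the
# generated ensemble keeps _bgcenter as its empirical mean. For instance:
#
#   E = EnsembleOfBackgroundPerturbations( numpy.zeros(4), numpy.eye(4), 10 )
#   E.shape                      # (4, 10): one column per member
#   numpy.abs( E.mean(axis=1) )  # close to zero, up to rounding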
540 # ==============================================================================
541 def EnsembleMean( __Ensemble ):
542     "Returns the empirical mean of an ensemble"
543     return numpy.asarray(__Ensemble).mean(axis=1, dtype=mfp).astype('float').reshape((-1,1))
544
545 # ==============================================================================
546 def EnsembleOfAnomalies( __Ensemble, __OptMean = None, __Normalisation = 1.):
547     "Returns the centered anomalies of an ensemble"
548     if __OptMean is None:
549         __Em = EnsembleMean( __Ensemble )
550     else:
551         __Em = numpy.ravel( __OptMean ).reshape((-1,1))
552     #
553     return __Normalisation * (numpy.asarray( __Ensemble ) - __Em)
554
555 # ==============================================================================
556 def EnsembleErrorCovariance( __Ensemble, __quick = False ):
557     "Returns the empirical estimate of the ensemble error covariance"
558     if __quick:
559         # Fast covariance estimate, but rarely positive definite
560         __Covariance = numpy.cov( __Ensemble )
561     else:
562         # Result usually identical to numpy.cov, but more robust
563         __n, __m = numpy.asarray( __Ensemble ).shape
564         __Anomalies = EnsembleOfAnomalies( __Ensemble )
565         # Empirical estimate
566         __Covariance = ( __Anomalies @ __Anomalies.T ) / (__m-1)
567         # Enforce symmetry
568         __Covariance = ( __Covariance + __Covariance.T ) * 0.5
569         # Enforce positive definiteness
570         __epsilon    = mpr*numpy.trace( __Covariance )
571         __Covariance = __Covariance + __epsilon * numpy.identity(__n)
572     #
573     return __Covariance
574
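# Editorial note (illustrative only): the robust branch above is the usual
# unbiased ensemble covariance built from the anomalies A = X - mean(X),
#     P = A A^T / (m - 1),
# followed by an explicit symmetrisation and a small diagonal shift that keeps
# the estimate numerically positive definite. A minimal check:
#
#   E = numpy.random.randn(3, 50)                  # 3 variables, 50 members
#   P = EnsembleErrorCovariance( E )
#   numpy.allclose( P, P.T )                       # True by construction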
575 # ==============================================================================
576 def EnsemblePerturbationWithGivenCovariance( __Ensemble, __Covariance, __Seed=None ):
577     "Adds a perturbation to each member of an ensemble according to a prescribed covariance"
578     if hasattr(__Covariance,"assparsematrix"):
579         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance.assparsematrix())/abs(__Ensemble).mean() < mpr).all():
580             # Handling of a null or almost null covariance
581             return __Ensemble
582         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance.assparsematrix()) < mpr).all():
583             # Handling of a null or almost null covariance
584             return __Ensemble
585     else:
586         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance)/abs(__Ensemble).mean() < mpr).all():
587             # Handling of a null or almost null covariance
588             return __Ensemble
589         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance) < mpr).all():
590             # Handling of a null or almost null covariance
591             return __Ensemble
592     #
593     __n, __m = __Ensemble.shape
594     if __Seed is not None: numpy.random.seed(__Seed)
595     #
596     if hasattr(__Covariance,"isscalar") and __Covariance.isscalar():
597         # Handling of a covariance proportional to the identity
598         __zero = 0.
599         __std  = numpy.sqrt(__Covariance.assparsematrix())
600         __Ensemble += numpy.random.normal(__zero, __std, size=(__m,__n)).T
601     #
602     elif hasattr(__Covariance,"isvector") and __Covariance.isvector():
603         # Handling of a diagonal covariance with non-identical variances
604         __zero = numpy.zeros(__n)
605         __std  = numpy.sqrt(__Covariance.assparsematrix())
606         __Ensemble += numpy.asarray([numpy.random.normal(__zero, __std) for i in range(__m)]).T
607     #
608     elif hasattr(__Covariance,"ismatrix") and __Covariance.ismatrix():
609         # Handling of a full covariance
610         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance.asfullmatrix(__n), size=__m).T
611     #
612     elif isinstance(__Covariance, numpy.ndarray):
613         # Handling of a full numpy covariance, knowing that this case is reached last
614         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance, size=__m).T
615     #
616     else:
617         raise ValueError("Error in ensemble perturbation with inadequate covariance specification")
618     #
619     return __Ensemble
620
621 # ==============================================================================
622 def CovarianceInflation(
623         InputCovOrEns,
624         InflationType   = None,
625         InflationFactor = None,
626         BackgroundCov   = None,
627         ):
628     """
629     Inflation applicable either to the covariances Pb or Pa, or to the ensembles EXb or EXa
630
631     Reference: Hunt 2007, section 2.3.5
632     """
633     if InflationFactor is None:
634         return InputCovOrEns
635     else:
636         InflationFactor = float(InflationFactor)
637     #
638     if InflationType in ["MultiplicativeOnAnalysisCovariance", "MultiplicativeOnBackgroundCovariance"]:
639         if InflationFactor < 1.:
640             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
641         if InflationFactor < 1.+mpr:
642             return InputCovOrEns
643         OutputCovOrEns = InflationFactor**2 * InputCovOrEns
644     #
645     elif InflationType in ["MultiplicativeOnAnalysisAnomalies", "MultiplicativeOnBackgroundAnomalies"]:
646         if InflationFactor < 1.:
647             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
648         if InflationFactor < 1.+mpr:
649             return InputCovOrEns
650         InputCovOrEnsMean = InputCovOrEns.mean(axis=1, dtype=mfp).astype('float')
651         OutputCovOrEns = InputCovOrEnsMean[:,numpy.newaxis] \
652             + InflationFactor * (InputCovOrEns - InputCovOrEnsMean[:,numpy.newaxis])
653     #
654     elif InflationType in ["AdditiveOnAnalysisCovariance", "AdditiveOnBackgroundCovariance"]:
655         if InflationFactor < 0.:
656             raise ValueError("Inflation factor for additive inflation has to be greater than or equal to 0.")
657         if InflationFactor < mpr:
658             return InputCovOrEns
659         __n, __m = numpy.asarray(InputCovOrEns).shape
660         if __n != __m:
661             raise ValueError("Additive inflation can only be applied to a square (covariance) matrix.")
662         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * numpy.identity(__n)
663     #
664     elif InflationType == "HybridOnBackgroundCovariance":
665         if InflationFactor < 0.:
666             raise ValueError("Inflation factor for hybrid inflation has to be greater than or equal to 0.")
667         if InflationFactor < mpr:
668             return InputCovOrEns
669         __n, __m = numpy.asarray(InputCovOrEns).shape
670         if __n != __m:
671             raise ValueError("Hybrid inflation can only be applied to a square (covariance) matrix.")
672         if BackgroundCov is None:
673             raise ValueError("Background covariance matrix B has to be given for hybrid inflation.")
674         if InputCovOrEns.shape != BackgroundCov.shape:
675             raise ValueError("Ensemble covariance matrix has to be of the same size as the background covariance matrix B.")
676         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * BackgroundCov
677     #
678     elif InflationType == "Relaxation":
679         raise NotImplementedError("InflationType Relaxation")
680     #
681     else:
682         raise ValueError("Error in inflation type, '%s' is not a valid keyword."%InflationType)
683     #
684     return OutputCovOrEns
685
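# Editorial note (illustrative only): the multiplicative inflation on the
# anomalies rescales each member around the ensemble mean by the factor
# r = InflationFactor (Hunt 2007, section 2.3.5):
#     X_i  <-  mean(X) + r * ( X_i - mean(X) )
# For instance, with an arbitrary small ensemble EXa:
#
#   EXa = numpy.random.randn(3, 5)
#   EXi = CovarianceInflation( EXa, "MultiplicativeOnAnalysisAnomalies", 1.05 )
#   EXi.mean(axis=1)             # ensemble mean unchanged, up to rounding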
686 # ==============================================================================
687 def HessienneEstimation(nb, HaM, HtM, BI, RI):
688     "Estimation of the Hessian and of the a posteriori covariance A as its inverse"
689     #
690     HessienneI = []
691     for i in range(int(nb)):
692         _ee    = numpy.zeros((nb,1))
693         _ee[i] = 1.
694         _HtEE  = numpy.dot(HtM,_ee).reshape((-1,1))
695         HessienneI.append( numpy.ravel( BI * _ee + HaM * (RI * _HtEE) ) )
696     #
697     A = numpy.linalg.inv(numpy.array( HessienneI ))
698     #
699     if min(A.shape) != max(A.shape):
700         raise ValueError("The a posteriori covariance matrix A has shape %s, whereas it must be a square matrix. There is an error in the observation operator, please check it."%(str(A.shape),))
701     if (numpy.diag(A) < 0).any():
702         raise ValueError("The a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it.")
703     if logging.getLogger().level < logging.WARNING: # This check is only performed in debug mode
704         try:
705             L = numpy.linalg.cholesky( A )
706         except numpy.linalg.LinAlgError:
707             raise ValueError("The a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator.")
708     #
709     return A
710
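# Editorial note (illustrative only): with HtM the tangent of the observation
# operator and HaM its adjoint (HaM = HtM.T in the linear case), the loop above
# assembles column by column the Hessian of the 3D-Var cost function and inverts
# it, i.e. it returns the classical a posteriori covariance
#     A = ( B^{-1} + H^T R^{-1} H )^{-1}.
# A dense equivalent, assuming plain numpy arrays for BI, RI and HtM, would be:
#
#   A = numpy.linalg.inv( BI + HtM.T @ (RI @ HtM) )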
711 # ==============================================================================
712 def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
713     "Estimation of the a posteriori quantiles (selfA is modified in place)"
714     nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"]
715     #
716     # Handling of the bounds
717     if "StateBoundsForQuantiles" in selfA._parameters:
718         LBounds = selfA._parameters["StateBoundsForQuantiles"] # Takes precedence
719     elif "Bounds" in selfA._parameters:
720         LBounds = selfA._parameters["Bounds"]  # Reasonable default
721     else:
722         LBounds = None
723     if LBounds is not None:
724         LBounds = ForceNumericBounds( LBounds )
725     _Xa = numpy.ravel(Xa)
726     #
727     # Sampling of the states
728     YfQ  = None
729     EXr  = None
730     for i in range(nbsamples):
731         if selfA._parameters["SimulationForQuantiles"] == "Linear" and HtM is not None and HXa is not None:
732             dXr = (numpy.random.multivariate_normal(_Xa,A) - _Xa).reshape((-1,1))
733             if LBounds is not None: # "EstimateProjection" by default
734                 dXr = numpy.max(numpy.hstack((dXr,LBounds[:,0].reshape((-1,1)) - _Xa.reshape((-1,1)))),axis=1).reshape((-1,1))
735                 dXr = numpy.min(numpy.hstack((dXr,LBounds[:,1].reshape((-1,1)) - _Xa.reshape((-1,1)))),axis=1).reshape((-1,1))
736             dYr = HtM @ dXr
737             Yr = HXa.reshape((-1,1)) + dYr
738             if selfA._toStore("SampledStateForQuantiles"): Xr = _Xa + numpy.ravel(dXr)
739         elif selfA._parameters["SimulationForQuantiles"] == "NonLinear" and Hm is not None:
740             Xr = numpy.random.multivariate_normal(_Xa,A)
741             if LBounds is not None: # "EstimateProjection" by default
742                 Xr = numpy.max(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,0].reshape((-1,1)))),axis=1)
743                 Xr = numpy.min(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,1].reshape((-1,1)))),axis=1)
744             Yr = Hm( Xr )
745         else:
746             raise ValueError("Quantile simulations can only be Linear or NonLinear.")
747         #
748         if YfQ is None:
749             YfQ = Yr.reshape((-1,1))
750             if selfA._toStore("SampledStateForQuantiles"): EXr = Xr.reshape((-1,1))
751         else:
752             YfQ = numpy.hstack((YfQ,Yr.reshape((-1,1))))
753             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.hstack((EXr,Xr.reshape((-1,1))))
754     #
755     # Extraction of the quantiles
756     YfQ.sort(axis=-1)
757     YQ = None
758     for quantile in selfA._parameters["Quantiles"]:
759         if not (0. <= float(quantile) <= 1.): continue
760         indice = int(nbsamples * float(quantile) - 1./nbsamples)
761         if YQ is None: YQ = YfQ[:,indice].reshape((-1,1))
762         else:          YQ = numpy.hstack((YQ,YfQ[:,indice].reshape((-1,1))))
763     if YQ is not None: # Non-empty list of quantiles
764         selfA.StoredVariables["SimulationQuantiles"].store( YQ )
765     if selfA._toStore("SampledStateForQuantiles"):
766         selfA.StoredVariables["SampledStateForQuantiles"].store( EXr )
767     #
768     return 0
769
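# Editorial note (illustrative only): in the "Linear" case above, each sample
# drawn around the analysis Xa is propagated through the tangent operator
# instead of the full model, i.e.
#     Yr = H(Xa) + HtM @ ( Xr - Xa )    with    Xr ~ N( Xa, A ),
# which avoids one nonlinear evaluation of H per sample, whereas the "NonLinear"
# case evaluates Yr = H( Xr ) directly.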
770 # ==============================================================================
771 def ForceNumericBounds( __Bounds ):
772     "Forces the bounds to be numeric values, unless globally None"
773     # Keep the default value None if there are no bounds
774     if __Bounds is None: return None
775     # Convert every individual None bound to +/- infinity
776     __Bounds = numpy.asarray( __Bounds, dtype=float )
777     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
778         raise ValueError("Incorrectly shaped bounds data")
779     __Bounds[numpy.isnan(__Bounds[:,0]),0] = -sys.float_info.max
780     __Bounds[numpy.isnan(__Bounds[:,1]),1] =  sys.float_info.max
781     return __Bounds
782
783 # ==============================================================================
784 def RecentredBounds( __Bounds, __Center):
785     "Recenters the bounds around 0, unless globally None"
786     # Keep the default value None if there are no bounds
787     if __Bounds is None: return None
788     # Recenter the numeric bound values
789     return ForceNumericBounds( __Bounds ) - numpy.ravel( __Center ).reshape((-1,1))
790
791 # ==============================================================================
792 def ApplyBounds( __Vector, __Bounds, __newClip = True):
793     "Applies numeric bounds to a point"
794     # Keep the default value if there are no bounds
795     if __Bounds is None: return __Vector
796     #
797     if not isinstance(__Vector, numpy.ndarray): # Must be an array
798         raise ValueError("Incorrect array definition of vector data")
799     if not isinstance(__Bounds, numpy.ndarray): # Must be an array
800         raise ValueError("Incorrect array definition of bounds data")
801     if 2*__Vector.size != __Bounds.size: # Must be a 2-column array of the vector length
802         raise ValueError("Incorrect bounds number to be applied for this vector")
803     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
804         raise ValueError("Incorrectly shaped bounds data")
805     #
806     if __newClip:
807         __Vector = __Vector.clip(
808             __Bounds[:,0].reshape(__Vector.shape),
809             __Bounds[:,1].reshape(__Vector.shape),
810             )
811     else:
812         __Vector = numpy.max(numpy.hstack((__Vector.reshape((-1,1)),numpy.asmatrix(__Bounds)[:,0])),axis=1)
813         __Vector = numpy.min(numpy.hstack((__Vector.reshape((-1,1)),numpy.asmatrix(__Bounds)[:,1])),axis=1)
814         __Vector = numpy.asarray(__Vector)
815     #
816     return __Vector
817
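# Editorial sketch (illustrative only, values are arbitrary): typical combined
# use of the two bound helpers defined above.
#
#   Bounds = ForceNumericBounds( [[0., None], [None, 10.]] )   # None -> +/- max float
#   x      = numpy.array([-1., 42.])
#   ApplyBounds( x, Bounds )                                    # -> array([ 0., 10.])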
818 # ==============================================================================
819 def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
820     """
821     Constrained Unscented Kalman Filter
822     """
823     if selfA._parameters["EstimationOf"] == "Parameters":
824         selfA._parameters["StoreInternalVariables"] = True
825     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
826     #
827     L     = Xb.size
828     Alpha = selfA._parameters["Alpha"]
829     Beta  = selfA._parameters["Beta"]
830     if selfA._parameters["Kappa"] == 0:
831         if selfA._parameters["EstimationOf"] == "State":
832             Kappa = 0
833         elif selfA._parameters["EstimationOf"] == "Parameters":
834             Kappa = 3 - L
835     else:
836         Kappa = selfA._parameters["Kappa"]
837     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
838     Gamma  = math.sqrt( L + Lambda )
839     #
840     Ww = []
841     Ww.append( 0. )
842     for i in range(2*L):
843         Ww.append( 1. / (2.*(L + Lambda)) )
844     #
845     Wm = numpy.array( Ww )
846     Wm[0] = Lambda / (L + Lambda)
847     Wc = numpy.array( Ww )
848     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
849     #
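    # Editorial note (illustrative only): with the scaled unscented transform
    # above, Lambda = Alpha^2 (L + Kappa) - L and Gamma = sqrt(L + Lambda); the
    # mean weights Wm sum to 1 and the covariance weights Wc sum to
    # 1 + (1 - Alpha^2 + Beta). For instance with L = 3, Alpha = 1, Beta = 2,
    # Kappa = 0: Lambda = 0, Gamma = sqrt(3), Wm = [0, 1/6, ..., 1/6] and
    # Wc = [2, 1/6, ..., 1/6].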
850     # Opérateurs
851     Hm = HO["Direct"].appliedControledFormTo
852     #
853     if selfA._parameters["EstimationOf"] == "State":
854         Mm = EM["Direct"].appliedControledFormTo
855     #
856     if CM is not None and "Tangent" in CM and U is not None:
857         Cm = CM["Tangent"].asMatrix(Xb)
858     else:
859         Cm = None
860     #
861     # Durée d'observation et tailles
862     if hasattr(Y,"stepnumber"):
863         duration = Y.stepnumber()
864         __p = numpy.cumprod(Y.shape())[-1]
865     else:
866         duration = 2
867         __p = numpy.array(Y).size
868     #
869     # Précalcul des inversions de B et R
870     if selfA._parameters["StoreInternalVariables"] \
871         or selfA._toStore("CostFunctionJ") \
872         or selfA._toStore("CostFunctionJb") \
873         or selfA._toStore("CostFunctionJo") \
874         or selfA._toStore("CurrentOptimum") \
875         or selfA._toStore("APosterioriCovariance"):
876         BI = B.getI()
877         RI = R.getI()
878     #
879     __n = Xb.size
880     #
881     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
882         Xn = Xb
883         if hasattr(B,"asfullmatrix"):
884             Pn = B.asfullmatrix(__n)
885         else:
886             Pn = B
887         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
888         selfA.StoredVariables["Analysis"].store( Xb )
889         if selfA._toStore("APosterioriCovariance"):
890             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
891     elif selfA._parameters["nextStep"]:
892         Xn = selfA._getInternalState("Xn")
893         Pn = selfA._getInternalState("Pn")
894     #
895     if selfA._parameters["EstimationOf"] == "Parameters":
896         XaMin            = Xn
897         previousJMinimum = numpy.finfo(float).max
898     #
899     for step in range(duration-1):
900         if hasattr(Y,"store"):
901             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
902         else:
903             Ynpu = numpy.ravel( Y ).reshape((__p,1))
904         #
905         if U is not None:
906             if hasattr(U,"store") and len(U)>1:
907                 Un = numpy.ravel( U[step] ).reshape((-1,1))
908             elif hasattr(U,"store") and len(U)==1:
909                 Un = numpy.ravel( U[0] ).reshape((-1,1))
910             else:
911                 Un = numpy.ravel( U ).reshape((-1,1))
912         else:
913             Un = None
914         #
915         Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
916         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
917         nbSpts = 2*Xn.size+1
918         #
919         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
920             for point in range(nbSpts):
921                 Xnp[:,point] = ApplyBounds( Xnp[:,point], selfA._parameters["Bounds"] )
922         #
923         XEtnnp = []
924         for point in range(nbSpts):
925             if selfA._parameters["EstimationOf"] == "State":
926                 XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1))
927                 if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
928                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
929                     XEtnnpi = XEtnnpi + Cm * Un
930                 if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
931                     XEtnnpi = ApplyBounds( XEtnnpi, selfA._parameters["Bounds"] )
932             elif selfA._parameters["EstimationOf"] == "Parameters":
933                 # --- > Par principe, M = Id, Q = 0
934                 XEtnnpi = Xnp[:,point]
935             XEtnnp.append( numpy.ravel(XEtnnpi).reshape((-1,1)) )
936         XEtnnp = numpy.concatenate( XEtnnp, axis=1 )
937         #
938         Xncm = ( XEtnnp * Wm ).sum(axis=1)
939         #
940         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
941             Xncm = ApplyBounds( Xncm, selfA._parameters["Bounds"] )
942         #
943         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
944         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
945         for point in range(nbSpts):
946             Pnm += Wc[point] * ((XEtnnp[:,point]-Xncm).reshape((-1,1)) * (XEtnnp[:,point]-Xncm))
947         #
948         if selfA._parameters["EstimationOf"] == "Parameters" and selfA._parameters["Bounds"] is not None:
949             Pnmdemi = selfA._parameters["Reconditioner"] * numpy.real(scipy.linalg.sqrtm(Pnm))
950         else:
951             Pnmdemi = numpy.real(scipy.linalg.sqrtm(Pnm))
952         #
953         Xnnp = numpy.hstack([Xncm.reshape((-1,1)), Xncm.reshape((-1,1))+Gamma*Pnmdemi, Xncm.reshape((-1,1))-Gamma*Pnmdemi])
954         #
955         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
956             for point in range(nbSpts):
957                 Xnnp[:,point] = ApplyBounds( Xnnp[:,point], selfA._parameters["Bounds"] )
958         #
959         Ynnp = []
960         for point in range(nbSpts):
961             if selfA._parameters["EstimationOf"] == "State":
962                 Ynnpi = Hm( (Xnnp[:,point], None) )
963             elif selfA._parameters["EstimationOf"] == "Parameters":
964                 Ynnpi = Hm( (Xnnp[:,point], Un) )
965             Ynnp.append( numpy.ravel(Ynnpi).reshape((-1,1)) )
966         Ynnp = numpy.concatenate( Ynnp, axis=1 )
967         #
968         Yncm = ( Ynnp * Wm ).sum(axis=1)
969         #
970         Pyyn = R
971         Pxyn = 0.
972         for point in range(nbSpts):
973             Pyyn += Wc[point] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
974             Pxyn += Wc[point] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
975         #
976         _Innovation  = Ynpu - Yncm.reshape((-1,1))
977         if selfA._parameters["EstimationOf"] == "Parameters":
978             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
979                 _Innovation = _Innovation - Cm * Un
980         #
981         Kn = Pxyn * Pyyn.I
982         Xn = Xncm.reshape((-1,1)) + Kn * _Innovation
983         Pn = Pnm - Kn * Pyyn * Kn.T
984         #
985         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
986             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
987         #
988         Xa = Xn # Pointeurs
989         #--------------------------
990         selfA._setInternalState("Xn", Xn)
991         selfA._setInternalState("Pn", Pn)
992         #--------------------------
993         #
994         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
995         # ---> avec analysis
996         selfA.StoredVariables["Analysis"].store( Xa )
997         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
998             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
999         if selfA._toStore("InnovationAtCurrentAnalysis"):
1000             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1001         # ---> avec current state
1002         if selfA._parameters["StoreInternalVariables"] \
1003             or selfA._toStore("CurrentState"):
1004             selfA.StoredVariables["CurrentState"].store( Xn )
1005         if selfA._toStore("ForecastState"):
1006             selfA.StoredVariables["ForecastState"].store( Xncm )
1007         if selfA._toStore("ForecastCovariance"):
1008             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
1009         if selfA._toStore("BMA"):
1010             selfA.StoredVariables["BMA"].store( Xncm - Xa )
1011         if selfA._toStore("InnovationAtCurrentState"):
1012             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1013         if selfA._toStore("SimulatedObservationAtCurrentState") \
1014             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1015             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
1016         # ---> autres
1017         if selfA._parameters["StoreInternalVariables"] \
1018             or selfA._toStore("CostFunctionJ") \
1019             or selfA._toStore("CostFunctionJb") \
1020             or selfA._toStore("CostFunctionJo") \
1021             or selfA._toStore("CurrentOptimum") \
1022             or selfA._toStore("APosterioriCovariance"):
1023             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1024             Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
1025             J   = Jb + Jo
1026             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1027             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1028             selfA.StoredVariables["CostFunctionJ" ].store( J )
1029             #
1030             if selfA._toStore("IndexOfOptimum") \
1031                 or selfA._toStore("CurrentOptimum") \
1032                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1033                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1034                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1035                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1036                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1037             if selfA._toStore("IndexOfOptimum"):
1038                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1039             if selfA._toStore("CurrentOptimum"):
1040                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1041             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1042                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1043             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1044                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1045             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1046                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1047             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1048                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1049         if selfA._toStore("APosterioriCovariance"):
1050             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1051         if selfA._parameters["EstimationOf"] == "Parameters" \
1052             and J < previousJMinimum:
1053             previousJMinimum    = J
1054             XaMin               = Xa
1055             if selfA._toStore("APosterioriCovariance"):
1056                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1057     #
1058     # Stockage final supplémentaire de l'optimum en estimation de paramètres
1059     # ----------------------------------------------------------------------
1060     if selfA._parameters["EstimationOf"] == "Parameters":
1061         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1062         selfA.StoredVariables["Analysis"].store( XaMin )
1063         if selfA._toStore("APosterioriCovariance"):
1064             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1065         if selfA._toStore("BMA"):
1066             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1067     #
1068     return 0
1069
1070 # ==============================================================================
1071 def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1072     """
1073     Constrained Extended Kalman Filter
1074     """
1075     if selfA._parameters["EstimationOf"] == "Parameters":
1076         selfA._parameters["StoreInternalVariables"] = True
1077     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
1078     #
1079     # Operators
1080     H = HO["Direct"].appliedControledFormTo
1081     #
1082     if selfA._parameters["EstimationOf"] == "State":
1083         M = EM["Direct"].appliedControledFormTo
1084     #
1085     if CM is not None and "Tangent" in CM and U is not None:
1086         Cm = CM["Tangent"].asMatrix(Xb)
1087     else:
1088         Cm = None
1089     #
1090     # Observation duration and sizes
1091     if hasattr(Y,"stepnumber"):
1092         duration = Y.stepnumber()
1093         __p = numpy.cumprod(Y.shape())[-1]
1094     else:
1095         duration = 2
1096         __p = numpy.array(Y).size
1097     #
1098     # Precomputation of the inverses of B and R
1099     if selfA._parameters["StoreInternalVariables"] \
1100         or selfA._toStore("CostFunctionJ") \
1101         or selfA._toStore("CostFunctionJb") \
1102         or selfA._toStore("CostFunctionJo") \
1103         or selfA._toStore("CurrentOptimum") \
1104         or selfA._toStore("APosterioriCovariance"):
1105         BI = B.getI()
1106         RI = R.getI()
1107     #
1108     __n = Xb.size
1109     #
1110     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1111         Xn = Xb
1112         Pn = B
1113         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1114         selfA.StoredVariables["Analysis"].store( Xb )
1115         if selfA._toStore("APosterioriCovariance"):
1116             if hasattr(B,"asfullmatrix"):
1117                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1118             else:
1119                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1120         selfA._setInternalState("seed", numpy.random.get_state())
1121     elif selfA._parameters["nextStep"]:
1122         Xn = selfA._getInternalState("Xn")
1123         Pn = selfA._getInternalState("Pn")
1124     #
1125     if selfA._parameters["EstimationOf"] == "Parameters":
1126         XaMin            = Xn
1127         previousJMinimum = numpy.finfo(float).max
1128     #
1129     for step in range(duration-1):
1130         if hasattr(Y,"store"):
1131             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1132         else:
1133             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1134         #
1135         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1136         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1137         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1138         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1139         #
1140         if selfA._parameters["EstimationOf"] == "State":
1141             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1142             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1143             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1144             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1145         #
1146         if U is not None:
1147             if hasattr(U,"store") and len(U)>1:
1148                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1149             elif hasattr(U,"store") and len(U)==1:
1150                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1151             else:
1152                 Un = numpy.ravel( U ).reshape((-1,1))
1153         else:
1154             Un = None
1155         #
1156         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1157             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
1158         #
1159         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1160             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1161             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, this is a duplicate!
1162                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1163                 Xn_predicted = Xn_predicted + Cm * Un
1164             Pn_predicted = Q + Mt * (Pn * Ma)
1165         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1166             # ---> By principle, M = Id, Q = 0
1167             Xn_predicted = Xn
1168             Pn_predicted = Pn
1169         #
1170         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1171             Xn_predicted = ApplyBounds( Xn_predicted, selfA._parameters["Bounds"] )
1172         #
1173         if selfA._parameters["EstimationOf"] == "State":
1174             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1175             _Innovation  = Ynpu - HX_predicted
1176         elif selfA._parameters["EstimationOf"] == "Parameters":
1177             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1178             _Innovation  = Ynpu - HX_predicted
1179             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, this is a duplicate!
1180                 _Innovation = _Innovation - Cm * Un
1181         #
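        # Analysis step: Kalman gain from the forecast covariance and the
        # tangent/adjoint observation operators, then the state and covariance
        # updates Xa = Xf + K d and Pa = (I - K Ht) Pf.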
1182         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1183         Xn = Xn_predicted + Kn * _Innovation
1184         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1185         #
1186         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1187             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
1188         #
1189         Xa = Xn # Pointers
1190         #--------------------------
1191         selfA._setInternalState("Xn", Xn)
1192         selfA._setInternalState("Pn", Pn)
1193         #--------------------------
1194         #
1195         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1196         # ---> with analysis
1197         selfA.StoredVariables["Analysis"].store( Xa )
1198         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1199             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1200         if selfA._toStore("InnovationAtCurrentAnalysis"):
1201             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1202         # ---> with current state
1203         if selfA._parameters["StoreInternalVariables"] \
1204             or selfA._toStore("CurrentState"):
1205             selfA.StoredVariables["CurrentState"].store( Xn )
1206         if selfA._toStore("ForecastState"):
1207             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1208         if selfA._toStore("ForecastCovariance"):
1209             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1210         if selfA._toStore("BMA"):
1211             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1212         if selfA._toStore("InnovationAtCurrentState"):
1213             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1214         if selfA._toStore("SimulatedObservationAtCurrentState") \
1215             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1216             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1217         # ---> others
1218         if selfA._parameters["StoreInternalVariables"] \
1219             or selfA._toStore("CostFunctionJ") \
1220             or selfA._toStore("CostFunctionJb") \
1221             or selfA._toStore("CostFunctionJo") \
1222             or selfA._toStore("CurrentOptimum") \
1223             or selfA._toStore("APosterioriCovariance"):
1224             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1225             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1226             J   = Jb + Jo
1227             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1228             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1229             selfA.StoredVariables["CostFunctionJ" ].store( J )
1230             #
1231             if selfA._toStore("IndexOfOptimum") \
1232                 or selfA._toStore("CurrentOptimum") \
1233                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1234                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1235                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1236                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1237                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1238             if selfA._toStore("IndexOfOptimum"):
1239                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1240             if selfA._toStore("CurrentOptimum"):
1241                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1242             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1243                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1244             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1245                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1246             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1247                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1248             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1249                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1250         if selfA._toStore("APosterioriCovariance"):
1251             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1252         if selfA._parameters["EstimationOf"] == "Parameters" \
1253             and J < previousJMinimum:
1254             previousJMinimum    = J
1255             XaMin               = Xa
1256             if selfA._toStore("APosterioriCovariance"):
1257                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1258     #
1259     # Final additional storage of the optimum in parameter estimation
1260     # ----------------------------------------------------------------------
1261     if selfA._parameters["EstimationOf"] == "Parameters":
1262         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1263         selfA.StoredVariables["Analysis"].store( XaMin )
1264         if selfA._toStore("APosterioriCovariance"):
1265             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1266         if selfA._toStore("BMA"):
1267             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1268     #
1269     return 0
1270
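# Illustrative sketch only: with "ConstrainedBy" set to "EstimateProjection",
# cekf projects the state onto the bounding box around each update. A plausible
# stand-in for ApplyBounds (defined elsewhere in this module) is a componentwise
# clipping; the helper name below is hypothetical.
def _sketchApplyBounds( __X, __Bounds ):
    "Assumed behaviour: clip each component of X into [lower, upper]"
    __Bounds = numpy.asarray( __Bounds, dtype=float )
    return numpy.clip( numpy.ravel(__X), __Bounds[:,0], __Bounds[:,1] ).reshape((-1,1))
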
1271 # ==============================================================================
1272 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
1273     """
1274     Ensemble Kalman Smoother (EnKS), fixed-lag formulation
1275     """
1276     #
1278     # Operators
1278     H = HO["Direct"].appliedControledFormTo
1279     #
1280     if selfA._parameters["EstimationOf"] == "State":
1281         M = EM["Direct"].appliedControledFormTo
1282     #
1283     if CM is not None and "Tangent" in CM and U is not None:
1284         Cm = CM["Tangent"].asMatrix(Xb)
1285     else:
1286         Cm = None
1287     #
1288     # Precomputation of the inverse square root of R
1289     RIdemi = R.sqrtmI()
1290     #
1291     # Observation duration and sizes
1292     LagL = selfA._parameters["SmootherLagL"]
1293     if (not hasattr(Y,"store")) or (not hasattr(Y,"stepnumber")):
1294         raise ValueError("Fixed-lag smoother requires a series of observations")
1295     if Y.stepnumber() < LagL:
1296         raise ValueError("Fixed-lag smoother requires a series of observations at least as long as the lag L")
1297     duration = Y.stepnumber()
1298     __p = numpy.cumprod(Y.shape())[-1]
1299     __n = Xb.size
1300     __m = selfA._parameters["NumberOfMembers"]
1301     #
1302     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1303         selfA.StoredVariables["Analysis"].store( Xb )
1304         if selfA._toStore("APosterioriCovariance"):
1305             if hasattr(B,"asfullmatrix"):
1306                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1307             else:
1308                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1309     #
1310     # Initial direct computation (memorization is preferred over recomputation)
1311     __seed = numpy.random.get_state()
1312     selfB = copy.deepcopy(selfA)
1313     selfB._parameters["StoreSupplementaryCalculations"] = ["CurrentEnsembleState"]
1314     if VariantM == "EnKS16-KalmanFilterFormula":
1315         etkf(selfB, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM = "KalmanFilterFormula")
1316     else:
1317         raise ValueError("VariantM has to be chosen in the authorized methods list.")
1318     if LagL > 0:
1319         EL  = selfB.StoredVariables["CurrentEnsembleState"][LagL-1]
1320     else:
1321         EL = EnsembleOfBackgroundPerturbations( Xb, None, __m ) # Cf. etkf
1322     selfA._parameters["SetSeed"] = numpy.random.set_state(__seed)
1323     #
1324     for step in range(LagL,duration-1):
1325         #
1326         sEL = selfB.StoredVariables["CurrentEnsembleState"][step+1-LagL:step+1]
1327         sEL.append(None)
1328         #
1329         if hasattr(Y,"store"):
1330             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1331         else:
1332             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1333         #
1334         if U is not None:
1335             if hasattr(U,"store") and len(U)>1:
1336                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1337             elif hasattr(U,"store") and len(U)==1:
1338                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1339             else:
1340                 Un = numpy.ravel( U ).reshape((-1,1))
1341         else:
1342             Un = None
1343         #
1344         #--------------------------
1345         if VariantM == "EnKS16-KalmanFilterFormula":
1346             if selfA._parameters["EstimationOf"] == "State": # Forecast
1347                 EL = M( [(EL[:,i], Un) for i in range(__m)],
1348                     argsAsSerie = True,
1349                     returnSerieAsArrayMatrix = True )
1350                 EL = EnsemblePerturbationWithGivenCovariance( EL, Q )
1351                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1352                     argsAsSerie = True,
1353                     returnSerieAsArrayMatrix = True )
1354                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, this is a duplicate!
1355                     Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1356                     EZ = EZ + Cm * Un
1357             elif selfA._parameters["EstimationOf"] == "Parameters":
1358                 # ---> By principle, M = Id, Q = 0
1359                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1360                     argsAsSerie = True,
1361                     returnSerieAsArrayMatrix = True )
1362             #
1363             vEm   = EL.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1364             vZm   = EZ.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1365             #
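            # Weight-space analysis: mS holds the normalized observation anomalies
            # R^{-1/2} (EZ - vZm)/sqrt(m-1), delta the normalized innovation, and
            # mT = (I + mS^T mS)^-1 the analysis weight covariance from which the
            # mean weights vw are obtained.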
1366             mS    = RIdemi @ EnsembleOfAnomalies( EZ, vZm, 1./math.sqrt(__m-1) )
1367             mS    = mS.reshape((-1,__m)) # For the one-dimensional case
1368             delta = RIdemi @ ( Ynpu - vZm )
1369             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1370             vw    = mT @ mS.T @ delta
1371             #
1372             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1373             mU    = numpy.identity(__m)
1374             wTU   = (vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU)
1375             #
1376             EX    = EnsembleOfAnomalies( EL, vEm, 1./math.sqrt(__m-1) )
1377             EL    = vEm + EX @ wTU
1378             #
1379             sEL[LagL] = EL
1380             for irl in range(LagL): # Smoothing of the L previous analyses
1381                 vEm = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1382                 EX = EnsembleOfAnomalies( sEL[irl], vEm, 1./math.sqrt(__m-1) )
1383                 sEL[irl] = vEm + EX @ wTU
1384             #
1385             # Keep the order-0 retrospective analysis before rotation
1386             Xa = sEL[0].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1387             if selfA._toStore("APosterioriCovariance"):
1388                 EXn = sEL[0]
1389             #
1390             for irl in range(LagL):
1391                 sEL[irl] = sEL[irl+1]
1392             sEL[LagL] = None
1393         #--------------------------
1394         else:
1395             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1396         #
1397         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1398         # ---> with analysis
1399         selfA.StoredVariables["Analysis"].store( Xa )
1400         if selfA._toStore("APosterioriCovariance"):
1401             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(EXn) )
1402     #
1403     # Storage of the last analyses, which remain incompletely updated
1404     for irl in range(LagL):
1405         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1406         Xa = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1407         selfA.StoredVariables["Analysis"].store( Xa )
1408     #
1409     return 0
1410
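# Illustrative sketch only: the deterministic "KalmanFilterFormula" analysis in
# weight space that enks above and etkf below both rely on, for a forecast
# ensemble E (n x m), its observed image HE (p x m), an observation Ynpu and an
# inverse square root RIdemi of R. The helper itself is hypothetical.
def _sketchEnsembleTransformAnalysis( E, HE, Ynpu, RIdemi ):
    "Minimal transform: w = (I + S^T S)^-1 S^T delta, Ea = Em + A (w + sqrt(m-1) T^(1/2))"
    __n, __m = E.shape
    vEm   = E.mean(axis=1).reshape((__n,1))
    vZm   = HE.mean(axis=1).reshape((-1,1))
    mS    = RIdemi @ ((HE - vZm) / math.sqrt(__m-1))
    delta = RIdemi @ (Ynpu - vZm)
    mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
    vw    = mT @ mS.T @ delta
    Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
    EX    = (E - vEm) / math.sqrt(__m-1)
    return vEm + EX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi )
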
1411 # ==============================================================================
1412 def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
1413     """
1414     Ensemble-Transform Kalman Filter (ETKF)
1415     """
1416     if selfA._parameters["EstimationOf"] == "Parameters":
1417         selfA._parameters["StoreInternalVariables"] = True
1418     #
1419     # Operators
1420     H = HO["Direct"].appliedControledFormTo
1421     #
1422     if selfA._parameters["EstimationOf"] == "State":
1423         M = EM["Direct"].appliedControledFormTo
1424     #
1425     if CM is not None and "Tangent" in CM and U is not None:
1426         Cm = CM["Tangent"].asMatrix(Xb)
1427     else:
1428         Cm = None
1429     #
1430     # Observation duration and sizes
1431     if hasattr(Y,"stepnumber"):
1432         duration = Y.stepnumber()
1433         __p = numpy.cumprod(Y.shape())[-1]
1434     else:
1435         duration = 2
1436         __p = numpy.array(Y).size
1437     #
1438     # Precomputation of the inverses of B and R
1439     if selfA._parameters["StoreInternalVariables"] \
1440         or selfA._toStore("CostFunctionJ") \
1441         or selfA._toStore("CostFunctionJb") \
1442         or selfA._toStore("CostFunctionJo") \
1443         or selfA._toStore("CurrentOptimum") \
1444         or selfA._toStore("APosterioriCovariance"):
1445         BI = B.getI()
1446         RI = R.getI()
1447     elif VariantM != "KalmanFilterFormula":
1448         RI = R.getI()
1449     if VariantM == "KalmanFilterFormula":
1450         RIdemi = R.sqrtmI()
1451     #
1452     __n = Xb.size
1453     __m = selfA._parameters["NumberOfMembers"]
1454     #
1455     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1456         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
1457         selfA.StoredVariables["Analysis"].store( Xb )
1458         if selfA._toStore("APosterioriCovariance"):
1459             if hasattr(B,"asfullmatrix"):
1460                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1461             else:
1462                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1463         selfA._setInternalState("seed", numpy.random.get_state())
1464     elif selfA._parameters["nextStep"]:
1465         Xn = selfA._getInternalState("Xn")
1466     #
1467     previousJMinimum = numpy.finfo(float).max
1468     #
1469     for step in range(duration-1):
1470         numpy.random.set_state(selfA._getInternalState("seed"))
1471         if hasattr(Y,"store"):
1472             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1473         else:
1474             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1475         #
1476         if U is not None:
1477             if hasattr(U,"store") and len(U)>1:
1478                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1479             elif hasattr(U,"store") and len(U)==1:
1480                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1481             else:
1482                 Un = numpy.ravel( U ).reshape((-1,1))
1483         else:
1484             Un = None
1485         #
1486         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1487             Xn = CovarianceInflation( Xn,
1488                 selfA._parameters["InflationType"],
1489                 selfA._parameters["InflationFactor"],
1490                 )
1491         #
1492         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1493             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
1494                 argsAsSerie = True,
1495                 returnSerieAsArrayMatrix = True )
1496             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
1497             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1498                 argsAsSerie = True,
1499                 returnSerieAsArrayMatrix = True )
1500             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, this is a duplicate!
1501                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1502                 Xn_predicted = Xn_predicted + Cm * Un
1503         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1504             # ---> By principle, M = Id, Q = 0
1505             Xn_predicted = EMX = Xn
1506             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1507                 argsAsSerie = True,
1508                 returnSerieAsArrayMatrix = True )
1509         #
1510         # Mean of forecast and observation of forecast
1511         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1512         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1513         #
1514         # Anomalies
1515         EaX   = EnsembleOfAnomalies( Xn_predicted, Xfm )
1516         EaHX  = EnsembleOfAnomalies( HX_predicted, Hfm)
1517         #
1518         #--------------------------
1519         if VariantM == "KalmanFilterFormula":
1520             mS    = RIdemi * EaHX / math.sqrt(__m-1)
1521             mS    = mS.reshape((-1,__m)) # For the one-dimensional case
1522             delta = RIdemi * ( Ynpu - Hfm )
1523             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1524             vw    = mT @ mS.T @ delta
1525             #
1526             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1527             mU    = numpy.identity(__m)
1528             #
1529             EaX   = EaX / math.sqrt(__m-1)
1530             Xn    = Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU )
1531         #--------------------------
1532         elif VariantM == "Variational":
1533             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm instead
1534             def CostFunction(w):
1535                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1536                 _Jo = 0.5 * _A.T @ (RI * _A)
1537                 _Jb = 0.5 * (__m-1) * w.T @ w
1538                 _J  = _Jo + _Jb
1539                 return float(_J)
1540             def GradientOfCostFunction(w):
1541                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1542                 _GradJo = - EaHX.T @ (RI * _A)
1543                 _GradJb = (__m-1) * w.reshape((__m,1))
1544                 _GradJ  = _GradJo + _GradJb
1545                 return numpy.ravel(_GradJ)
1546             vw = scipy.optimize.fmin_cg(
1547                 f           = CostFunction,
1548                 x0          = numpy.zeros(__m),
1549                 fprime      = GradientOfCostFunction,
1550                 args        = (),
1551                 disp        = False,
1552                 )
1553             #
1554             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1555             Htb = (__m-1) * numpy.identity(__m)
1556             Hta = Hto + Htb
1557             #
1558             Pta = numpy.linalg.inv( Hta )
1559             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1560             #
1561             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1562         #--------------------------
1563         elif VariantM == "FiniteSize11": # Jauge Boc2011
1564             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm instead
1565             def CostFunction(w):
1566                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1567                 _Jo = 0.5 * _A.T @ (RI * _A)
1568                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
1569                 _J  = _Jo + _Jb
1570                 return float(_J)
1571             def GradientOfCostFunction(w):
1572                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1573                 _GradJo = - EaHX.T @ (RI * _A)
1574                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1575                 _GradJ  = _GradJo + _GradJb
1576                 return numpy.ravel(_GradJ)
1577             vw = scipy.optimize.fmin_cg(
1578                 f           = CostFunction,
1579                 x0          = numpy.zeros(__m),
1580                 fprime      = GradientOfCostFunction,
1581                 args        = (),
1582                 disp        = False,
1583                 )
1584             #
1585             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1586             Htb = __m * \
1587                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1588                 / (1 + 1/__m + vw.T @ vw)**2
1589             Hta = Hto + Htb
1590             #
1591             Pta = numpy.linalg.inv( Hta )
1592             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1593             #
1594             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1595         #--------------------------
1596         elif VariantM == "FiniteSize15": # Jauge Boc2015
1597             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm instead
1598             def CostFunction(w):
1599                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1600                 _Jo = 0.5 * _A.T @ (RI * _A)
1601                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w)
1602                 _J  = _Jo + _Jb
1603                 return float(_J)
1604             def GradientOfCostFunction(w):
1605                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1606                 _GradJo = - EaHX.T @ (RI * _A)
1607                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1608                 _GradJ  = _GradJo + _GradJb
1609                 return numpy.ravel(_GradJ)
1610             vw = scipy.optimize.fmin_cg(
1611                 f           = CostFunction,
1612                 x0          = numpy.zeros(__m),
1613                 fprime      = GradientOfCostFunction,
1614                 args        = (),
1615                 disp        = False,
1616                 )
1617             #
1618             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1619             Htb = (__m+1) * \
1620                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1621                 / (1 + 1/__m + vw.T @ vw)**2
1622             Hta = Hto + Htb
1623             #
1624             Pta = numpy.linalg.inv( Hta )
1625             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1626             #
1627             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1628         #--------------------------
1629         elif VariantM == "FiniteSize16": # Jauge Boc2016
1630             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm instead
1631             def CostFunction(w):
1632                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1633                 _Jo = 0.5 * _A.T @ (RI * _A)
1634                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
1635                 _J  = _Jo + _Jb
1636                 return float(_J)
1637             def GradientOfCostFunction(w):
1638                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1639                 _GradJo = - EaHX.T @ (RI * _A)
1640                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
1641                 _GradJ  = _GradJo + _GradJb
1642                 return numpy.ravel(_GradJ)
1643             vw = scipy.optimize.fmin_cg(
1644                 f           = CostFunction,
1645                 x0          = numpy.zeros(__m),
1646                 fprime      = GradientOfCostFunction,
1647                 args        = (),
1648                 disp        = False,
1649                 )
1650             #
1651             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1652             Htb = ((__m+1) / (__m-1)) * \
1653                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.identity(__m) - 2 * vw @ vw.T / (__m-1) ) \
1654                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
1655             Hta = Hto + Htb
1656             #
1657             Pta = numpy.linalg.inv( Hta )
1658             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1659             #
1660             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1661         #--------------------------
1662         else:
1663             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1664         #
1665         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1666             Xn = CovarianceInflation( Xn,
1667                 selfA._parameters["InflationType"],
1668                 selfA._parameters["InflationFactor"],
1669                 )
1670         #
1671         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1672         #--------------------------
1673         selfA._setInternalState("Xn", Xn)
1674         selfA._setInternalState("seed", numpy.random.get_state())
1675         #--------------------------
1676         #
1677         if selfA._parameters["StoreInternalVariables"] \
1678             or selfA._toStore("CostFunctionJ") \
1679             or selfA._toStore("CostFunctionJb") \
1680             or selfA._toStore("CostFunctionJo") \
1681             or selfA._toStore("APosterioriCovariance") \
1682             or selfA._toStore("InnovationAtCurrentAnalysis") \
1683             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1684             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1685             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1686             _Innovation = Ynpu - _HXa
1687         #
1688         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1689         # ---> with analysis
1690         selfA.StoredVariables["Analysis"].store( Xa )
1691         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1692             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1693         if selfA._toStore("InnovationAtCurrentAnalysis"):
1694             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1695         # ---> with current state
1696         if selfA._parameters["StoreInternalVariables"] \
1697             or selfA._toStore("CurrentState"):
1698             selfA.StoredVariables["CurrentState"].store( Xn )
1699         if selfA._toStore("ForecastState"):
1700             selfA.StoredVariables["ForecastState"].store( EMX )
1701         if selfA._toStore("ForecastCovariance"):
1702             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
1703         if selfA._toStore("BMA"):
1704             selfA.StoredVariables["BMA"].store( EMX - Xa )
1705         if selfA._toStore("InnovationAtCurrentState"):
1706             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
1707         if selfA._toStore("SimulatedObservationAtCurrentState") \
1708             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1709             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1710         # ---> others
1711         if selfA._parameters["StoreInternalVariables"] \
1712             or selfA._toStore("CostFunctionJ") \
1713             or selfA._toStore("CostFunctionJb") \
1714             or selfA._toStore("CostFunctionJo") \
1715             or selfA._toStore("CurrentOptimum") \
1716             or selfA._toStore("APosterioriCovariance"):
1717             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1718             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1719             J   = Jb + Jo
1720             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1721             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1722             selfA.StoredVariables["CostFunctionJ" ].store( J )
1723             #
1724             if selfA._toStore("IndexOfOptimum") \
1725                 or selfA._toStore("CurrentOptimum") \
1726                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1727                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1728                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1729                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1730                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1731             if selfA._toStore("IndexOfOptimum"):
1732                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1733             if selfA._toStore("CurrentOptimum"):
1734                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1735             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1736                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1737             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1738                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1739             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1740                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1741             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1742                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1743         if selfA._toStore("APosterioriCovariance"):
1744             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1745         if selfA._parameters["EstimationOf"] == "Parameters" \
1746             and J < previousJMinimum:
1747             previousJMinimum    = J
1748             XaMin               = Xa
1749             if selfA._toStore("APosterioriCovariance"):
1750                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1751         # ---> For the smoothers
1752         if selfA._toStore("CurrentEnsembleState"):
1753             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1754     #
1755     # Final additional storage of the optimum in parameter estimation
1756     # ----------------------------------------------------------------------
1757     if selfA._parameters["EstimationOf"] == "Parameters":
1758         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1759         selfA.StoredVariables["Analysis"].store( XaMin )
1760         if selfA._toStore("APosterioriCovariance"):
1761             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1762         if selfA._toStore("BMA"):
1763             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1764     #
1765     return 0
1766
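# Illustrative sketch only: the ensemble helpers used by the ensemble algorithms
# above (EnsembleOfAnomalies, EnsembleErrorCovariance) are defined earlier in
# this module; for an (n x m) ensemble E their assumed behaviour is as follows.
def _sketchEnsembleAnomalies( E ):
    "Assumed behaviour: centred members E - mean(E), mean taken over the members (columns)"
    E = numpy.asarray( E, dtype=float )
    return E - E.mean(axis=1, keepdims=True)

def _sketchEnsembleErrorCovariance( E ):
    "Assumed behaviour: unbiased empirical covariance A A^T / (m-1)"
    A = _sketchEnsembleAnomalies( E )
    return A @ A.T / (A.shape[1] - 1)
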
1767 # ==============================================================================
1768 def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1769     """
1770     Extended Kalman Filter
1771     """
1772     if selfA._parameters["EstimationOf"] == "Parameters":
1773         selfA._parameters["StoreInternalVariables"] = True
1774     #
1775     # Operators
1776     H = HO["Direct"].appliedControledFormTo
1777     #
1778     if selfA._parameters["EstimationOf"] == "State":
1779         M = EM["Direct"].appliedControledFormTo
1780     #
1781     if CM is not None and "Tangent" in CM and U is not None:
1782         Cm = CM["Tangent"].asMatrix(Xb)
1783     else:
1784         Cm = None
1785     #
1786     # Observation duration and sizes
1787     if hasattr(Y,"stepnumber"):
1788         duration = Y.stepnumber()
1789         __p = numpy.cumprod(Y.shape())[-1]
1790     else:
1791         duration = 2
1792         __p = numpy.array(Y).size
1793     #
1794     # Precomputation of the inverses of B and R
1795     if selfA._parameters["StoreInternalVariables"] \
1796         or selfA._toStore("CostFunctionJ") \
1797         or selfA._toStore("CostFunctionJb") \
1798         or selfA._toStore("CostFunctionJo") \
1799         or selfA._toStore("CurrentOptimum") \
1800         or selfA._toStore("APosterioriCovariance"):
1801         BI = B.getI()
1802         RI = R.getI()
1803     #
1804     __n = Xb.size
1805     #
1806     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1807         Xn = Xb
1808         Pn = B
1809         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1810         selfA.StoredVariables["Analysis"].store( Xb )
1811         if selfA._toStore("APosterioriCovariance"):
1812             if hasattr(B,"asfullmatrix"):
1813                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1814             else:
1815                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1816         selfA._setInternalState("seed", numpy.random.get_state())
1817     elif selfA._parameters["nextStep"]:
1818         Xn = selfA._getInternalState("Xn")
1819         Pn = selfA._getInternalState("Pn")
1820     #
1821     if selfA._parameters["EstimationOf"] == "Parameters":
1822         XaMin            = Xn
1823         previousJMinimum = numpy.finfo(float).max
1824     #
1825     for step in range(duration-1):
1826         if hasattr(Y,"store"):
1827             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1828         else:
1829             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1830         #
1831         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1832         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1833         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1834         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1835         #
1836         if selfA._parameters["EstimationOf"] == "State":
1837             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1838             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1839             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1840             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1841         #
1842         if U is not None:
1843             if hasattr(U,"store") and len(U)>1:
1844                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1845             elif hasattr(U,"store") and len(U)==1:
1846                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1847             else:
1848                 Un = numpy.ravel( U ).reshape((-1,1))
1849         else:
1850             Un = None
1851         #
1852         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1853             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1854             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, this is a duplicate!
1855                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1856                 Xn_predicted = Xn_predicted + Cm * Un
1857             Pn_predicted = Q + Mt * (Pn * Ma)
1858         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1859             # ---> By principle, M = Id, Q = 0
1860             Xn_predicted = Xn
1861             Pn_predicted = Pn
1862         #
1863         if selfA._parameters["EstimationOf"] == "State":
1864             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1865             _Innovation  = Ynpu - HX_predicted
1866         elif selfA._parameters["EstimationOf"] == "Parameters":
1867             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1868             _Innovation  = Ynpu - HX_predicted
1869             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, this is a duplicate!
1870                 _Innovation = _Innovation - Cm * Un
1871         #
1872         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1873         Xn = Xn_predicted + Kn * _Innovation
1874         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1875         #
1876         Xa = Xn # Pointers
1877         #--------------------------
1878         selfA._setInternalState("Xn", Xn)
1879         selfA._setInternalState("Pn", Pn)
1880         #--------------------------
1881         #
1882         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1883         # ---> with analysis
1884         selfA.StoredVariables["Analysis"].store( Xa )
1885         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1886             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1887         if selfA._toStore("InnovationAtCurrentAnalysis"):
1888             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1889         # ---> with current state
1890         if selfA._parameters["StoreInternalVariables"] \
1891             or selfA._toStore("CurrentState"):
1892             selfA.StoredVariables["CurrentState"].store( Xn )
1893         if selfA._toStore("ForecastState"):
1894             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1895         if selfA._toStore("ForecastCovariance"):
1896             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1897         if selfA._toStore("BMA"):
1898             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1899         if selfA._toStore("InnovationAtCurrentState"):
1900             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1901         if selfA._toStore("SimulatedObservationAtCurrentState") \
1902             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1903             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1904         # ---> others
1905         if selfA._parameters["StoreInternalVariables"] \
1906             or selfA._toStore("CostFunctionJ") \
1907             or selfA._toStore("CostFunctionJb") \
1908             or selfA._toStore("CostFunctionJo") \
1909             or selfA._toStore("CurrentOptimum") \
1910             or selfA._toStore("APosterioriCovariance"):
1911             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1912             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1913             J   = Jb + Jo
1914             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1915             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1916             selfA.StoredVariables["CostFunctionJ" ].store( J )
1917             #
1918             if selfA._toStore("IndexOfOptimum") \
1919                 or selfA._toStore("CurrentOptimum") \
1920                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1921                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1922                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1923                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1924                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1925             if selfA._toStore("IndexOfOptimum"):
1926                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1927             if selfA._toStore("CurrentOptimum"):
1928                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1929             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1930                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1931             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1932                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1933             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1934                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1935             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1936                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1937         if selfA._toStore("APosterioriCovariance"):
1938             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1939         if selfA._parameters["EstimationOf"] == "Parameters" \
1940             and J < previousJMinimum:
1941             previousJMinimum    = J
1942             XaMin               = Xa
1943             if selfA._toStore("APosterioriCovariance"):
1944                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1945     #
1946     # Final additional storage of the optimum in parameter estimation
1947     # ----------------------------------------------------------------------
1948     if selfA._parameters["EstimationOf"] == "Parameters":
1949         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1950         selfA.StoredVariables["Analysis"].store( XaMin )
1951         if selfA._toStore("APosterioriCovariance"):
1952             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1953         if selfA._toStore("BMA"):
1954             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1955     #
1956     return 0
1957
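# Illustrative sketch only: in state estimation, exkf and cekf above propagate
# the state with the (possibly non-linear) model and the covariance with its
# tangent (Mt) and adjoint (Ma) linearizations, Pf = Mt Pn Ma + Q. The helper
# name and signature are hypothetical.
def _sketchEKFForecast( Xn, Pn, M, Mt, Ma, Q ):
    "Minimal forecast step: Xf = M(Xn), Pf = Q + Mt Pn Ma"
    Xf = numpy.ravel( M( Xn ) ).reshape((-1,1))
    Pf = Q + Mt @ (Pn @ Ma)
    return Xf, Pf
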
1958 # ==============================================================================
1959 def ienkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="IEnKF12",
1960     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1961     """
1962     Iterative EnKF
1963     """
1964     if selfA._parameters["EstimationOf"] == "Parameters":
1965         selfA._parameters["StoreInternalVariables"] = True
1966     #
1967     # Operators
1968     H = HO["Direct"].appliedControledFormTo
1969     #
1970     if selfA._parameters["EstimationOf"] == "State":
1971         M = EM["Direct"].appliedControledFormTo
1972     #
1973     if CM is not None and "Tangent" in CM and U is not None:
1974         Cm = CM["Tangent"].asMatrix(Xb)
1975     else:
1976         Cm = None
1977     #
1978     # Observation duration and sizes
1979     if hasattr(Y,"stepnumber"):
1980         duration = Y.stepnumber()
1981         __p = numpy.cumprod(Y.shape())[-1]
1982     else:
1983         duration = 2
1984         __p = numpy.array(Y).size
1985     #
1986     # Precomputation of the inverses of B and R
1987     if selfA._parameters["StoreInternalVariables"] \
1988         or selfA._toStore("CostFunctionJ") \
1989         or selfA._toStore("CostFunctionJb") \
1990         or selfA._toStore("CostFunctionJo") \
1991         or selfA._toStore("CurrentOptimum") \
1992         or selfA._toStore("APosterioriCovariance"):
1993         BI = B.getI()
1994     RI = R.getI()
1995     #
1996     __n = Xb.size
1997     __m = selfA._parameters["NumberOfMembers"]
1998     #
1999     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2000         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
2001         else:                         Pn = B
2002         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
2003         selfA.StoredVariables["Analysis"].store( Xb )
2004         if selfA._toStore("APosterioriCovariance"):
2005             if hasattr(B,"asfullmatrix"):
2006                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2007             else:
2008                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2009         selfA._setInternalState("seed", numpy.random.get_state())
2010     elif selfA._parameters["nextStep"]:
2011         Xn = selfA._getInternalState("Xn")
2012     #
2013     previousJMinimum = numpy.finfo(float).max
2014     #
2015     for step in range(duration-1):
2016         numpy.random.set_state(selfA._getInternalState("seed"))
2017         if hasattr(Y,"store"):
2018             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2019         else:
2020             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2021         #
2022         if U is not None:
2023             if hasattr(U,"store") and len(U)>1:
2024                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2025             elif hasattr(U,"store") and len(U)==1:
2026                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2027             else:
2028                 Un = numpy.asmatrix(numpy.ravel( U )).T
2029         else:
2030             Un = None
2031         #
2032         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2033             Xn = CovarianceInflation( Xn,
2034                 selfA._parameters["InflationType"],
2035                 selfA._parameters["InflationFactor"],
2036                 )
2037         #
2038         #--------------------------
2039         if VariantM == "IEnKF12":
2040             Xfm = numpy.ravel(Xn.mean(axis=1, dtype=mfp).astype('float'))
2041             EaX = EnsembleOfAnomalies( Xn ) / math.sqrt(__m-1)
2042             __j = 0
2043             Deltaw = 1
2044             if not BnotT:
2045                 Ta  = numpy.identity(__m)
2046             vw  = numpy.zeros(__m)
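            # Gauss-Newton iterations in weight space: at each pass the ensemble is
            # re-centred on the current weights vw, propagated and observed, and the
            # increment Deltaw = - (I + Y^T R^-1 Y)^-1 grad J(w) is applied until
            # convergence (||Deltaw|| < _e) or at most _jmax iterations.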
2047             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2048                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2049                 #
2050                 if BnotT:
2051                     E1 = vx1 + _epsilon * EaX
2052                 else:
2053                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2054                 #
2055                 if selfA._parameters["EstimationOf"] == "State": # Forecast + Q
2056                     E2 = M( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2057                         argsAsSerie = True,
2058                         returnSerieAsArrayMatrix = True )
2059                 elif selfA._parameters["EstimationOf"] == "Parameters":
2060                     # ---> By principle, M = Id
2061                     E2 = Xn
2062                 vx2 = E2.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2063                 vy1 = H((vx2, Un)).reshape((__p,1))
2064                 #
2065                 HE2 = H( [(E2[:,i,numpy.newaxis], Un) for i in range(__m)],
2066                     argsAsSerie = True,
2067                     returnSerieAsArrayMatrix = True )
2068                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2069                 #
2070                 if BnotT:
2071                     EaY = (HE2 - vy2) / _epsilon
2072                 else:
2073                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2074                 #
2075                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy1 )))
2076                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2077                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2078                 #
2079                 vw = vw + Deltaw
2080                 #
2081                 if not BnotT:
2082                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2083                 #
2084                 __j = __j + 1
2085             #
2086             A2 = EnsembleOfAnomalies( E2 )
2087             #
2088             if BnotT:
2089                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2090                 A2 = math.sqrt(__m-1) * A2 @ Ta / _epsilon
2091             #
2092             Xn = vx2 + A2
2093         #--------------------------
2094         else:
2095             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2096         #
2097         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2098             Xn = CovarianceInflation( Xn,
2099                 selfA._parameters["InflationType"],
2100                 selfA._parameters["InflationFactor"],
2101                 )
2102         #
2103         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2104         #--------------------------
2105         selfA._setInternalState("Xn", Xn)
2106         selfA._setInternalState("seed", numpy.random.get_state())
2107         #--------------------------
2108         #
2109         if selfA._parameters["StoreInternalVariables"] \
2110             or selfA._toStore("CostFunctionJ") \
2111             or selfA._toStore("CostFunctionJb") \
2112             or selfA._toStore("CostFunctionJo") \
2113             or selfA._toStore("APosterioriCovariance") \
2114             or selfA._toStore("InnovationAtCurrentAnalysis") \
2115             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2116             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2117             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2118             _Innovation = Ynpu - _HXa
2119         #
2120         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2121         # ---> with analysis
2122         selfA.StoredVariables["Analysis"].store( Xa )
2123         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2124             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2125         if selfA._toStore("InnovationAtCurrentAnalysis"):
2126             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2127         # ---> with current state
2128         if selfA._parameters["StoreInternalVariables"] \
2129             or selfA._toStore("CurrentState"):
2130             selfA.StoredVariables["CurrentState"].store( Xn )
2131         if selfA._toStore("ForecastState"):
2132             selfA.StoredVariables["ForecastState"].store( E2 )
2133         if selfA._toStore("ForecastCovariance"):
2134             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(E2) )
2135         if selfA._toStore("BMA"):
2136             selfA.StoredVariables["BMA"].store( E2 - Xa )
2137         if selfA._toStore("InnovationAtCurrentState"):
2138             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2139         if selfA._toStore("SimulatedObservationAtCurrentState") \
2140             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2141             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2142         # ---> others
2143         if selfA._parameters["StoreInternalVariables"] \
2144             or selfA._toStore("CostFunctionJ") \
2145             or selfA._toStore("CostFunctionJb") \
2146             or selfA._toStore("CostFunctionJo") \
2147             or selfA._toStore("CurrentOptimum") \
2148             or selfA._toStore("APosterioriCovariance"):
2149             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2150             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2151             J   = Jb + Jo
2152             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2153             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2154             selfA.StoredVariables["CostFunctionJ" ].store( J )
2155             #
2156             if selfA._toStore("IndexOfOptimum") \
2157                 or selfA._toStore("CurrentOptimum") \
2158                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2159                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2160                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2161                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2162                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2163             if selfA._toStore("IndexOfOptimum"):
2164                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2165             if selfA._toStore("CurrentOptimum"):
2166                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2167             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2168                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2169             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2170                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2171             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2172                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2173             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2174                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2175         if selfA._toStore("APosterioriCovariance"):
2176             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2177         if selfA._parameters["EstimationOf"] == "Parameters" \
2178             and J < previousJMinimum:
2179             previousJMinimum    = J
2180             XaMin               = Xa
2181             if selfA._toStore("APosterioriCovariance"):
2182                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2183         # ---> For the smoothers
2184         if selfA._toStore("CurrentEnsembleState"):
2185             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2186     #
2187     # Additional final storage of the optimum, for parameter estimation
2188     # ----------------------------------------------------------------------
2189     if selfA._parameters["EstimationOf"] == "Parameters":
2190         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2191         selfA.StoredVariables["Analysis"].store( XaMin )
2192         if selfA._toStore("APosterioriCovariance"):
2193             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2194         if selfA._toStore("BMA"):
2195             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2196     #
2197     return 0
2198
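# The block above closes the sequential loop: in "Parameters" estimation mode it
# keeps the analysis associated with the smallest cost value J seen over the
# whole assimilation window, and stores that optimum one last time. A minimal,
# illustrative sketch of this running-minimum pattern (plain numpy, hypothetical
# data, not part of the ADAO API) is:

def _example_running_best_analysis():
    "Illustrative sketch only: keep the analysis with the smallest cost J."
    rng = numpy.random.default_rng(0)
    previousJMinimum, XaMin = numpy.finfo(float).max, None
    for _step in range(5):
        Xa = rng.normal(size=(3,1))         # stands for the analysis of one step
        J  = float(rng.uniform(0., 10.))    # stands for the cost J of that step
        if J < previousJMinimum:            # same test as in the filters above
            previousJMinimum, XaMin = J, Xa
    return XaMin, previousJMinimum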
2199 # ==============================================================================
2200 def incr3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2201     """
2202     Incremental 3DVAR
2203     """
2204     #
2205     # Initialisations
2206     # ---------------
2207     Hm = HO["Direct"].appliedTo
2208     #
2209     BI = B.getI()
2210     RI = R.getI()
2211     #
2212     Xini = selfA._parameters["InitializationPoint"]
2213     #
2214     HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
2215     Innovation = Y - HXb
2216     #
2217     # Outer Loop
2218     # ----------
2219     iOuter = 0
2220     J      = 1./mpr
2221     DeltaJ = 1./mpr
2222     Xr     = Xini.reshape((-1,1))
2223     while abs(DeltaJ) >= selfA._parameters["CostDecrementTolerance"] and iOuter <= selfA._parameters["MaximumNumberOfSteps"]:
2224         #
2225         # Inner Loop
2226         # ----------
2227         Ht = HO["Tangent"].asMatrix(Xr)
2228         Ht = Ht.reshape(Y.size,Xr.size) # ADAO & check shape
2229         #
2230         # Definition of the cost function
2231         # ------------------------------
2232         def CostFunction(dx):
2233             _dX  = numpy.asmatrix(numpy.ravel( dx )).T
2234             if selfA._parameters["StoreInternalVariables"] or \
2235                 selfA._toStore("CurrentState") or \
2236                 selfA._toStore("CurrentOptimum"):
2237                 selfA.StoredVariables["CurrentState"].store( Xb + _dX )
2238             _HdX = Ht * _dX
2239             _HdX = numpy.asmatrix(numpy.ravel( _HdX )).T
2240             _dInnovation = Innovation - _HdX
2241             if selfA._toStore("SimulatedObservationAtCurrentState") or \
2242                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2243                 selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb + _HdX )
2244             if selfA._toStore("InnovationAtCurrentState"):
2245                 selfA.StoredVariables["InnovationAtCurrentState"].store( _dInnovation )
2246             #
2247             Jb  = float( 0.5 * _dX.T * BI * _dX )
2248             Jo  = float( 0.5 * _dInnovation.T * RI * _dInnovation )
2249             J   = Jb + Jo
2250             #
2251             selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2252             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2253             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2254             selfA.StoredVariables["CostFunctionJ" ].store( J )
2255             if selfA._toStore("IndexOfOptimum") or \
2256                 selfA._toStore("CurrentOptimum") or \
2257                 selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2258                 selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2259                 selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2260                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2261                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2262             if selfA._toStore("IndexOfOptimum"):
2263                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2264             if selfA._toStore("CurrentOptimum"):
2265                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2266             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2267                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2268             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2269                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2270             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2271                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2272             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2273                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2274             return J
2275         #
2276         def GradientOfCostFunction(dx):
2277             _dX          = numpy.asmatrix(numpy.ravel( dx )).T
2278             _HdX         = Ht * _dX
2279             _HdX         = numpy.asmatrix(numpy.ravel( _HdX )).T
2280             _dInnovation = Innovation - _HdX
2281             GradJb       = BI * _dX
2282             GradJo       = - Ht.T @ (RI * _dInnovation)
2283             GradJ        = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2284             return GradJ
2285         #
2286         # Minimization of the functional
2287         # --------------------------------
2288         nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2289         #
2290         if selfA._parameters["Minimizer"] == "LBFGSB":
2291             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
2292             if "0.19" <= scipy.version.version <= "1.1.0":
2293                 import lbfgsbhlt as optimiseur
2294             else:
2295                 import scipy.optimize as optimiseur
2296             Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2297                 func        = CostFunction,
2298                 x0          = numpy.zeros(Xini.size),
2299                 fprime      = GradientOfCostFunction,
2300                 args        = (),
2301                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2302                 maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2303                 factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2304                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2305                 iprint      = selfA._parameters["optiprint"],
2306                 )
2307             nfeval = Informations['funcalls']
2308             rc     = Informations['warnflag']
2309         elif selfA._parameters["Minimizer"] == "TNC":
2310             Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2311                 func        = CostFunction,
2312                 x0          = numpy.zeros(Xini.size),
2313                 fprime      = GradientOfCostFunction,
2314                 args        = (),
2315                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2316                 maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2317                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2318                 ftol        = selfA._parameters["CostDecrementTolerance"],
2319                 messages    = selfA._parameters["optmessages"],
2320                 )
2321         elif selfA._parameters["Minimizer"] == "CG":
2322             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2323                 f           = CostFunction,
2324                 x0          = numpy.zeros(Xini.size),
2325                 fprime      = GradientOfCostFunction,
2326                 args        = (),
2327                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2328                 gtol        = selfA._parameters["GradientNormTolerance"],
2329                 disp        = selfA._parameters["optdisp"],
2330                 full_output = True,
2331                 )
2332         elif selfA._parameters["Minimizer"] == "NCG":
2333             Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2334                 f           = CostFunction,
2335                 x0          = numpy.zeros(Xini.size),
2336                 fprime      = GradientOfCostFunction,
2337                 args        = (),
2338                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2339                 avextol     = selfA._parameters["CostDecrementTolerance"],
2340                 disp        = selfA._parameters["optdisp"],
2341                 full_output = True,
2342                 )
2343         elif selfA._parameters["Minimizer"] == "BFGS":
2344             Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2345                 f           = CostFunction,
2346                 x0          = numpy.zeros(Xini.size),
2347                 fprime      = GradientOfCostFunction,
2348                 args        = (),
2349                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2350                 gtol        = selfA._parameters["GradientNormTolerance"],
2351                 disp        = selfA._parameters["optdisp"],
2352                 full_output = True,
2353                 )
2354         else:
2355             raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2356         #
2357         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2358         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2359         #
2360         if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2361             Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2362         else:
2363             Minimum = Xb + Minimum.reshape((-1,1))
2364         #
2365         Xr     = Minimum
2366         DeltaJ, J = selfA.StoredVariables["CostFunctionJ" ][-1] - J, selfA.StoredVariables["CostFunctionJ" ][-1] # Refresh J so that DeltaJ measures the cost change between two outer iterations
2367         iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
2368     #
2369     Xa = Xr
2370     #--------------------------
2371     #
2372     selfA.StoredVariables["Analysis"].store( Xa )
2373     #
2374     if selfA._toStore("OMA") or \
2375         selfA._toStore("SigmaObs2") or \
2376         selfA._toStore("SimulationQuantiles") or \
2377         selfA._toStore("SimulatedObservationAtOptimum"):
2378         if selfA._toStore("SimulatedObservationAtCurrentState"):
2379             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2380         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2381             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2382         else:
2383             HXa = Hm( Xa )
2384     #
2385     if selfA._toStore("APosterioriCovariance") or \
2386         selfA._toStore("SimulationQuantiles") or \
2387         selfA._toStore("JacobianMatrixAtOptimum") or \
2388         selfA._toStore("KalmanGainAtOptimum"):
2389         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2390         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2391     if selfA._toStore("APosterioriCovariance") or \
2392         selfA._toStore("SimulationQuantiles") or \
2393         selfA._toStore("KalmanGainAtOptimum"):
2394         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2395         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2396     if selfA._toStore("APosterioriCovariance") or \
2397         selfA._toStore("SimulationQuantiles"):
2398         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
2399     if selfA._toStore("APosterioriCovariance"):
2400         selfA.StoredVariables["APosterioriCovariance"].store( A )
2401     if selfA._toStore("JacobianMatrixAtOptimum"):
2402         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2403     if selfA._toStore("KalmanGainAtOptimum"):
2404         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2405         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2406         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2407     #
2408     # Additional computations and/or storage
2409     # ---------------------------------------
2410     if selfA._toStore("Innovation") or \
2411         selfA._toStore("SigmaObs2") or \
2412         selfA._toStore("MahalanobisConsistency") or \
2413         selfA._toStore("OMB"):
2414         d  = Y - HXb
2415     if selfA._toStore("Innovation"):
2416         selfA.StoredVariables["Innovation"].store( d )
2417     if selfA._toStore("BMA"):
2418         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2419     if selfA._toStore("OMA"):
2420         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2421     if selfA._toStore("OMB"):
2422         selfA.StoredVariables["OMB"].store( d )
2423     if selfA._toStore("SigmaObs2"):
2424         TraceR = R.trace(Y.size)
2425         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
2426     if selfA._toStore("MahalanobisConsistency"):
2427         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2428     if selfA._toStore("SimulationQuantiles"):
2429         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2430     if selfA._toStore("SimulatedObservationAtBackground"):
2431         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
2432     if selfA._toStore("SimulatedObservationAtOptimum"):
2433         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
2434     #
2435     return 0
2436
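# The incr3dvar function above alternates an outer loop, which relinearizes the
# observation operator around the current iterate, with an inner loop that
# minimizes a quadratic cost in the increment dx. A minimal, self-contained
# sketch of one such inner problem (hypothetical toy matrices, and
# scipy.optimize.minimize used directly instead of the ADAO minimizer switch) is:

def _example_incremental_3dvar_inner_problem():
    "Illustrative sketch only: one linearized inner minimization of incremental 3DVAR."
    xb = numpy.array([0., 0.])                       # hypothetical background
    y  = numpy.array([1., 2., 3.])                   # hypothetical observations
    B  = numpy.eye(2) ; R = 0.1 * numpy.eye(3)       # hypothetical covariances
    H  = numpy.array([[1., 0.], [0., 1.], [1., 1.]]) # tangent operator at the outer iterate
    BI, RI = numpy.linalg.inv(B), numpy.linalg.inv(R)
    d  = y - H @ xb                                  # innovation
    def J(dx):
        r = d - H @ dx
        return 0.5 * dx @ BI @ dx + 0.5 * r @ RI @ r
    def GradJ(dx):
        return BI @ dx - H.T @ RI @ (d - H @ dx)
    res = scipy.optimize.minimize(J, numpy.zeros(2), jac=GradJ, method="L-BFGS-B")
    return xb + res.x                                # updated outer iterate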
2437 # ==============================================================================
2438 def mlef(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="MLEF13",
2439     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
2440     """
2441     Maximum Likelihood Ensemble Filter
2442     """
2443     if selfA._parameters["EstimationOf"] == "Parameters":
2444         selfA._parameters["StoreInternalVariables"] = True
2445     #
2446     # Operators
2447     H = HO["Direct"].appliedControledFormTo
2448     #
2449     if selfA._parameters["EstimationOf"] == "State":
2450         M = EM["Direct"].appliedControledFormTo
2451     #
2452     if CM is not None and "Tangent" in CM and U is not None:
2453         Cm = CM["Tangent"].asMatrix(Xb)
2454     else:
2455         Cm = None
2456     #
2457     # Observation duration and sizes
2458     if hasattr(Y,"stepnumber"):
2459         duration = Y.stepnumber()
2460         __p = numpy.cumprod(Y.shape())[-1]
2461     else:
2462         duration = 2
2463         __p = numpy.array(Y).size
2464     #
2465     # Precompute the inverses of B and R
2466     if selfA._parameters["StoreInternalVariables"] \
2467         or selfA._toStore("CostFunctionJ") \
2468         or selfA._toStore("CostFunctionJb") \
2469         or selfA._toStore("CostFunctionJo") \
2470         or selfA._toStore("CurrentOptimum") \
2471         or selfA._toStore("APosterioriCovariance"):
2472         BI = B.getI()
2473     RI = R.getI()
2474     #
2475     __n = Xb.size
2476     __m = selfA._parameters["NumberOfMembers"]
2477     #
2478     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2479         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
2480         selfA.StoredVariables["Analysis"].store( Xb )
2481         if selfA._toStore("APosterioriCovariance"):
2482             if hasattr(B,"asfullmatrix"):
2483                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2484             else:
2485                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2486         selfA._setInternalState("seed", numpy.random.get_state())
2487     elif selfA._parameters["nextStep"]:
2488         Xn = selfA._getInternalState("Xn")
2489     #
2490     previousJMinimum = numpy.finfo(float).max
2491     #
2492     for step in range(duration-1):
2493         numpy.random.set_state(selfA._getInternalState("seed"))
2494         if hasattr(Y,"store"):
2495             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2496         else:
2497             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2498         #
2499         if U is not None:
2500             if hasattr(U,"store") and len(U)>1:
2501                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2502             elif hasattr(U,"store") and len(U)==1:
2503                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2504             else:
2505                 Un = numpy.asmatrix(numpy.ravel( U )).T
2506         else:
2507             Un = None
2508         #
2509         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2510             Xn = CovarianceInflation( Xn,
2511                 selfA._parameters["InflationType"],
2512                 selfA._parameters["InflationFactor"],
2513                 )
2514         #
2515         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2516             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2517                 argsAsSerie = True,
2518                 returnSerieAsArrayMatrix = True )
2519             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2520             if Cm is not None and Un is not None: # Caution: if Cm is already included in M, it would be applied twice!
2521                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2522                 Xn_predicted = Xn_predicted + Cm * Un
2523         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2524             # --- > By principle, M = Id, Q = 0
2525             Xn_predicted = EMX = Xn
2526         #
2527         #--------------------------
2528         if VariantM == "MLEF13":
2529             Xfm = numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))
2530             EaX = EnsembleOfAnomalies( Xn_predicted, Xfm, 1./math.sqrt(__m-1) )
2531             Ua  = numpy.identity(__m)
2532             __j = 0
2533             Deltaw = 1
2534             if not BnotT:
2535                 Ta  = numpy.identity(__m)
2536             vw  = numpy.zeros(__m)
2537             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2538                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2539                 #
2540                 if BnotT:
2541                     E1 = vx1 + _epsilon * EaX
2542                 else:
2543                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2544                 #
2545                 HE2 = H( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2546                     argsAsSerie = True,
2547                     returnSerieAsArrayMatrix = True )
2548                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2549                 #
2550                 if BnotT:
2551                     EaY = (HE2 - vy2) / _epsilon
2552                 else:
2553                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2554                 #
2555                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy2 )))
2556                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2557                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2558                 #
2559                 vw = vw + Deltaw
2560                 #
2561                 if not BnotT:
2562                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2563                 #
2564                 __j = __j + 1
2565             #
2566             if BnotT:
2567                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2568             #
2569             Xn = vx1 + math.sqrt(__m-1) * EaX @ Ta @ Ua
2570         #--------------------------
2571         else:
2572             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2573         #
2574         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2575             Xn = CovarianceInflation( Xn,
2576                 selfA._parameters["InflationType"],
2577                 selfA._parameters["InflationFactor"],
2578                 )
2579         #
2580         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2581         #--------------------------
2582         selfA._setInternalState("Xn", Xn)
2583         selfA._setInternalState("seed", numpy.random.get_state())
2584         #--------------------------
2585         #
2586         if selfA._parameters["StoreInternalVariables"] \
2587             or selfA._toStore("CostFunctionJ") \
2588             or selfA._toStore("CostFunctionJb") \
2589             or selfA._toStore("CostFunctionJo") \
2590             or selfA._toStore("APosterioriCovariance") \
2591             or selfA._toStore("InnovationAtCurrentAnalysis") \
2592             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2593             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2594             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2595             _Innovation = Ynpu - _HXa
2596         #
2597         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2598         # ---> with the analysis
2599         selfA.StoredVariables["Analysis"].store( Xa )
2600         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2601             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2602         if selfA._toStore("InnovationAtCurrentAnalysis"):
2603             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2604         # ---> with the current state
2605         if selfA._parameters["StoreInternalVariables"] \
2606             or selfA._toStore("CurrentState"):
2607             selfA.StoredVariables["CurrentState"].store( Xn )
2608         if selfA._toStore("ForecastState"):
2609             selfA.StoredVariables["ForecastState"].store( EMX )
2610         if selfA._toStore("ForecastCovariance"):
2611             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2612         if selfA._toStore("BMA"):
2613             selfA.StoredVariables["BMA"].store( EMX - Xa )
2614         if selfA._toStore("InnovationAtCurrentState"):
2615             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2616         if selfA._toStore("SimulatedObservationAtCurrentState") \
2617             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2618             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2619         # ---> others
2620         if selfA._parameters["StoreInternalVariables"] \
2621             or selfA._toStore("CostFunctionJ") \
2622             or selfA._toStore("CostFunctionJb") \
2623             or selfA._toStore("CostFunctionJo") \
2624             or selfA._toStore("CurrentOptimum") \
2625             or selfA._toStore("APosterioriCovariance"):
2626             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2627             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2628             J   = Jb + Jo
2629             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2630             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2631             selfA.StoredVariables["CostFunctionJ" ].store( J )
2632             #
2633             if selfA._toStore("IndexOfOptimum") \
2634                 or selfA._toStore("CurrentOptimum") \
2635                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2636                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2637                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2638                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2639                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2640             if selfA._toStore("IndexOfOptimum"):
2641                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2642             if selfA._toStore("CurrentOptimum"):
2643                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2644             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2645                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2646             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2647                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2648             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2649                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2650             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2651                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2652         if selfA._toStore("APosterioriCovariance"):
2653             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2654         if selfA._parameters["EstimationOf"] == "Parameters" \
2655             and J < previousJMinimum:
2656             previousJMinimum    = J
2657             XaMin               = Xa
2658             if selfA._toStore("APosterioriCovariance"):
2659                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2660         # ---> For the smoothers
2661         if selfA._toStore("CurrentEnsembleState"):
2662             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2663     #
2664     # Additional final storage of the optimum, for parameter estimation
2665     # ----------------------------------------------------------------------
2666     if selfA._parameters["EstimationOf"] == "Parameters":
2667         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2668         selfA.StoredVariables["Analysis"].store( XaMin )
2669         if selfA._toStore("APosterioriCovariance"):
2670             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2671         if selfA._toStore("BMA"):
2672             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2673     #
2674     return 0
2675
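# The mlef function above performs, at each time step, a Newton iteration on the
# ensemble weights w: the gradient is w - EaY^T R^{-1} (y - H(x)) and the
# approximate Hessian is I + EaY^T R^{-1} EaY. A minimal, illustrative sketch of
# that weight iteration, with a hypothetical linear observation operator so that
# the observed anomalies are simply H @ EaX (toy data, not part of the ADAO API), is:

def _example_mlef_weight_iteration():
    "Illustrative sketch only: ensemble-space Newton iteration as used in mlef."
    rng = numpy.random.default_rng(1)
    n, p, m = 3, 4, 5                               # hypothetical sizes
    Xf  = rng.normal(size=(n, m))                   # forecast ensemble
    Hl  = rng.normal(size=(p, n))                   # hypothetical linear observation operator
    RI  = numpy.eye(p)                              # inverse observation covariance
    y   = rng.normal(size=(p, 1))
    xfm = Xf.mean(axis=1).reshape((n, 1))
    EaX = (Xf - xfm) / math.sqrt(m - 1)             # normalized anomalies
    vw  = numpy.zeros((m, 1))
    for _ in range(20):
        dy     = y - Hl @ (xfm + EaX @ vw)
        EaY    = Hl @ EaX                           # observed anomalies (linear case)
        GradJ  = vw - EaY.T @ RI @ dy
        mH     = numpy.eye(m) + EaY.T @ RI @ EaY
        Deltaw = - numpy.linalg.solve(mH, GradJ)
        vw    += Deltaw
        if numpy.linalg.norm(Deltaw) < 1.e-7:
            break
    return xfm + EaX @ vw                           # analysis mean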
2676 # ==============================================================================
2677 def mmqr(
2678         func     = None,
2679         x0       = None,
2680         fprime   = None,
2681         bounds   = None,
2682         quantile = 0.5,
2683         maxfun   = 15000,
2684         toler    = 1.e-06,
2685         y        = None,
2686         ):
2687     """
2688     Computer implementation of the MMQR algorithm, based on the publication:
2689     David R. Hunter, Kenneth Lange, "Quantile Regression via an MM Algorithm",
2690     Journal of Computational and Graphical Statistics, 9, 1, pp.60-77, 2000.
2691     """
2692     #
2693     # Retrieval of the initial data and information
2694     # --------------------------------------------------
2695     variables = numpy.ravel( x0 )
2696     mesures   = numpy.ravel( y )
2697     increment = sys.float_info.max
2698     p         = variables.size
2699     n         = mesures.size
2700     quantile  = float(quantile)
2701     #
2702     # Computation of the MM parameters
2703     # ---------------------------
2704     tn      = float(toler) / n
2705     e0      = -tn / math.log(tn)
2706     epsilon = (e0-tn)/(1+math.log(e0))
2707     #
2708     # Initialization computations
2709     # ------------------------
2710     residus  = mesures - numpy.ravel( func( variables ) )
2711     poids    = 1./(epsilon+numpy.abs(residus))
2712     veps     = 1. - 2. * quantile - residus * poids
2713     lastsurrogate = -numpy.sum(residus*veps) - (1.-2.*quantile)*numpy.sum(residus)
2714     iteration = 0
2715     #
2716     # Iterative search
2717     # -------------------
2718     while (increment > toler) and (iteration < maxfun) :
2719         iteration += 1
2720         #
2721         Derivees  = numpy.array(fprime(variables))
2722         Derivees  = Derivees.reshape(n,p) # Necessary to restore the matrix shape if it went through YACS pipes
2723         DeriveesT = Derivees.transpose()
2724         M         =   numpy.dot( DeriveesT , poids.reshape((n,1)) * Derivees ) # J^T diag(poids) J, without building the repeated-weights matrix
2725         SM        =   numpy.dot( DeriveesT , veps )
2726         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
2727         #
2728         variables = variables + step
2729         if bounds is not None:
2730             # Caution: avoid an infinite loop if an interval is too small
2731             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
2732                 step      = step/2.
2733                 variables = variables - step
2734         residus   = mesures - numpy.ravel( func(variables) )
2735         surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2736         #
2737         while ( (surrogate > lastsurrogate) and ( max(list(numpy.abs(step))) > 1.e-16 ) ) :
2738             step      = step/2.
2739             variables = variables - step
2740             residus   = mesures - numpy.ravel( func(variables) )
2741             surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2742         #
2743         increment     = lastsurrogate-surrogate
2744         poids         = 1./(epsilon+numpy.abs(residus))
2745         veps          = 1. - 2. * quantile - residus * poids
2746         lastsurrogate = -numpy.sum(residus * veps) - (1.-2.*quantile)*numpy.sum(residus)
2747     #
2748     # Discrepancy measure
2749     # --------------
2750     Ecart = quantile * numpy.sum(residus) - numpy.sum( residus[residus<0] )
2751     #
2752     return variables, Ecart, [n,p,iteration,increment,0]
2753
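# The mmqr function above iterates a majorize-minimize step: residuals are
# reweighted by 1/(epsilon+|r|), the update solves the weighted normal equations,
# and a step-halving safeguard keeps the surrogate decreasing. A minimal,
# illustrative sketch of these MM iterations on a hypothetical linear median
# regression (synthetic data and names, not part of the ADAO API) is:

def _example_mmqr_linear_median_regression():
    "Illustrative sketch only: MM iterations for a linear quantile (median) fit."
    rng = numpy.random.default_rng(2)
    A = numpy.column_stack([numpy.ones(50), rng.uniform(0., 1., size=50)])  # design matrix
    y = 2. + 3. * A[:, 1] + rng.normal(0., 0.5, size=50)                    # synthetic data
    quantile, epsilon = 0.5, 1.e-6
    beta = numpy.zeros(2)
    for _ in range(100):
        residuals = y - A @ beta
        weights   = 1. / (epsilon + numpy.abs(residuals))   # the "poids" of mmqr
        veps      = 1. - 2. * quantile - residuals * weights
        M         = A.T @ (weights[:, None] * A)
        SM        = A.T @ veps
        step      = - numpy.linalg.lstsq(M, SM, rcond=None)[0]
        beta      = beta + step
        if numpy.max(numpy.abs(step)) < 1.e-10:
            break
    return beta                                             # close to (2., 3.) in expectation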
2754 # ==============================================================================
2755 def multi3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
2756     """
2757     Multi-step and multi-method 3DVAR
2758     """
2759     #
2760     # Initialisation
2761     # --------------
2762     if selfA._parameters["EstimationOf"] == "State":
2763         M = EM["Direct"].appliedTo
2764         #
2765         if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2766             Xn = numpy.ravel(Xb).reshape((-1,1))
2767             selfA.StoredVariables["Analysis"].store( Xn )
2768             if selfA._toStore("APosterioriCovariance"):
2769                 if hasattr(B,"asfullmatrix"):
2770                     selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
2771                 else:
2772                     selfA.StoredVariables["APosterioriCovariance"].store( B )
2773             if selfA._toStore("ForecastState"):
2774                 selfA.StoredVariables["ForecastState"].store( Xn )
2775         elif selfA._parameters["nextStep"]:
2776             Xn = selfA._getInternalState("Xn")
2777     else:
2778         Xn = numpy.ravel(Xb).reshape((-1,1))
2779     #
2780     if hasattr(Y,"stepnumber"):
2781         duration = Y.stepnumber()
2782     else:
2783         duration = 2
2784     #
2785     # Multi-step
2786     for step in range(duration-1):
2787         if hasattr(Y,"store"):
2788             Ynpu = numpy.ravel( Y[step+1] ).reshape((-1,1))
2789         else:
2790             Ynpu = numpy.ravel( Y ).reshape((-1,1))
2791         #
2792         if selfA._parameters["EstimationOf"] == "State": # Forecast
2793             Xn_predicted = M( Xn )
2794             if selfA._toStore("ForecastState"):
2795                 selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2796         elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
2797             # --- > By principle, M = Id, Q = 0
2798             Xn_predicted = Xn
2799         Xn_predicted = numpy.ravel(Xn_predicted).reshape((-1,1))
2800         #
2801         oneCycle(selfA, Xn_predicted, Ynpu, U, HO, None, None, R, B, None)
2802         #
2803         Xn = selfA.StoredVariables["Analysis"][-1]
2804         #--------------------------
2805         selfA._setInternalState("Xn", Xn)
2806     #
2807     return 0
2808
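# The multi3dvar function above only organizes the cycling: at each observation
# time it propagates the previous analysis with the evolution model, then calls
# the single-cycle algorithm passed as oneCycle on the forecast. A minimal,
# illustrative sketch of that forecast/analysis cycling (toy model and blending
# rule, not the actual ADAO operators) is:

def _example_multistep_cycling():
    "Illustrative sketch only: the forecast/analysis cycling wrapped by multi3dvar."
    def M(x):                      # hypothetical forecast model (simple decay)
        return 0.9 * x
    def oneCycle(xf, y):           # hypothetical single-step analysis (plain blend)
        return 0.5 * (xf + y)
    x = numpy.array([1.0])         # initial analysis
    observations = [numpy.array([0.8]), numpy.array([0.7]), numpy.array([0.9])]
    analyses = []
    for y in observations:         # one analysis per observation time
        xf = M(x)                  # forecast from the previous analysis
        x  = oneCycle(xf, y)       # analysis, reused as the next initial condition
        analyses.append(x)
    return analyses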
2809 # ==============================================================================
2810 def psas3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2811     """
2812     3DVAR PSAS
2813     """
2814     #
2815     # Initialisations
2816     # ---------------
2817     Hm = HO["Direct"].appliedTo
2818     #
2819     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2820         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2821     else:
2822         HXb = Hm( Xb )
2823     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2824     if Y.size != HXb.size:
2825         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
2826     if max(Y.shape) != max(HXb.shape):
2827         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
2828     #
2829     if selfA._toStore("JacobianMatrixAtBackground"):
2830         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2831         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2832         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2833     #
2834     Ht = HO["Tangent"].asMatrix(Xb)
2835     BHT = B * Ht.T
2836     HBHTpR = R + Ht * BHT
2837     Innovation = Y - HXb
2838     #
2839     Xini = numpy.zeros(Xb.shape)
2840     #
2841     # Definition of the cost function
2842     # ------------------------------
2843     def CostFunction(w):
2844         _W = w.reshape((-1,1))
2845         if selfA._parameters["StoreInternalVariables"] or \
2846             selfA._toStore("CurrentState") or \
2847             selfA._toStore("CurrentOptimum"):
2848             selfA.StoredVariables["CurrentState"].store( Xb + BHT @ _W )
2849         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2850             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2851             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm( Xb + BHT @ _W ) )
2852         if selfA._toStore("InnovationAtCurrentState"):
2853             selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
2854         #
2855         Jb  = float( 0.5 * _W.T @ (HBHTpR @ _W) )
2856         Jo  = float( - _W.T @ Innovation )
2857         J   = Jb + Jo
2858         #
2859         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2860         selfA.StoredVariables["CostFunctionJb"].store( Jb )
2861         selfA.StoredVariables["CostFunctionJo"].store( Jo )
2862         selfA.StoredVariables["CostFunctionJ" ].store( J )
2863         if selfA._toStore("IndexOfOptimum") or \
2864             selfA._toStore("CurrentOptimum") or \
2865             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2866             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2867             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2868             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2869             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2870         if selfA._toStore("IndexOfOptimum"):
2871             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2872         if selfA._toStore("CurrentOptimum"):
2873             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2874         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2875             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2876         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2877             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2878         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2879             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2880         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2881             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2882         return J
2883     #
2884     def GradientOfCostFunction(w):
2885         _W = w.reshape((-1,1))
2886         GradJb  = HBHTpR @ _W
2887         GradJo  = - Innovation
2888         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2889         return GradJ
2890     #
2891     # Minimization of the functional
2892     # --------------------------------
2893     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2894     #
2895     if selfA._parameters["Minimizer"] == "LBFGSB":
2896         if "0.19" <= scipy.version.version <= "1.1.0":
2897             import lbfgsbhlt as optimiseur
2898         else:
2899             import scipy.optimize as optimiseur
2900         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2901             func        = CostFunction,
2902             x0          = Xini,
2903             fprime      = GradientOfCostFunction,
2904             args        = (),
2905             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2906             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2907             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2908             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2909             iprint      = selfA._parameters["optiprint"],
2910             )
2911         nfeval = Informations['funcalls']
2912         rc     = Informations['warnflag']
2913     elif selfA._parameters["Minimizer"] == "TNC":
2914         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2915             func        = CostFunction,
2916             x0          = Xini,
2917             fprime      = GradientOfCostFunction,
2918             args        = (),
2919             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2920             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2921             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2922             ftol        = selfA._parameters["CostDecrementTolerance"],
2923             messages    = selfA._parameters["optmessages"],
2924             )
2925     elif selfA._parameters["Minimizer"] == "CG":
2926         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2927             f           = CostFunction,
2928             x0          = Xini,
2929             fprime      = GradientOfCostFunction,
2930             args        = (),
2931             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2932             gtol        = selfA._parameters["GradientNormTolerance"],
2933             disp        = selfA._parameters["optdisp"],
2934             full_output = True,
2935             )
2936     elif selfA._parameters["Minimizer"] == "NCG":
2937         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2938             f           = CostFunction,
2939             x0          = Xini,
2940             fprime      = GradientOfCostFunction,
2941             args        = (),
2942             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2943             avextol     = selfA._parameters["CostDecrementTolerance"],
2944             disp        = selfA._parameters["optdisp"],
2945             full_output = True,
2946             )
2947     elif selfA._parameters["Minimizer"] == "BFGS":
2948         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2949             f           = CostFunction,
2950             x0          = Xini,
2951             fprime      = GradientOfCostFunction,
2952             args        = (),
2953             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2954             gtol        = selfA._parameters["GradientNormTolerance"],
2955             disp        = selfA._parameters["optdisp"],
2956             full_output = True,
2957             )
2958     else:
2959         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2960     #
2961     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2962     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2963     #
2964     # Workaround for a TNC bug on the returned Minimum
2965     # ----------------------------------------------------------------
2966     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2967         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2968     else:
2969         Minimum = Xb + BHT @ Minimum.reshape((-1,1))
2970     #
2971     Xa = Minimum
2972     #--------------------------
2973     #
2974     selfA.StoredVariables["Analysis"].store( Xa )
2975     #
2976     if selfA._toStore("OMA") or \
2977         selfA._toStore("SigmaObs2") or \
2978         selfA._toStore("SimulationQuantiles") or \
2979         selfA._toStore("SimulatedObservationAtOptimum"):
2980         if selfA._toStore("SimulatedObservationAtCurrentState"):
2981             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2982         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2983             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2984         else:
2985             HXa = Hm( Xa )
2986     #
2987     if selfA._toStore("APosterioriCovariance") or \
2988         selfA._toStore("SimulationQuantiles") or \
2989         selfA._toStore("JacobianMatrixAtOptimum") or \
2990         selfA._toStore("KalmanGainAtOptimum"):
2991         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2992         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2993     if selfA._toStore("APosterioriCovariance") or \
2994         selfA._toStore("SimulationQuantiles") or \
2995         selfA._toStore("KalmanGainAtOptimum"):
2996         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2997         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2998     if selfA._toStore("APosterioriCovariance") or \
2999         selfA._toStore("SimulationQuantiles"):
3000         BI = B.getI()
3001         RI = R.getI()
3002         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
3003     if selfA._toStore("APosterioriCovariance"):
3004         selfA.StoredVariables["APosterioriCovariance"].store( A )
3005     if selfA._toStore("JacobianMatrixAtOptimum"):
3006         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3007     if selfA._toStore("KalmanGainAtOptimum"):
3008         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3009         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3010         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3011     #
3012     # Additional computations and/or storage
3013     # ---------------------------------------
3014     if selfA._toStore("Innovation") or \
3015         selfA._toStore("SigmaObs2") or \
3016         selfA._toStore("MahalanobisConsistency") or \
3017         selfA._toStore("OMB"):
3018         d  = Y - HXb
3019     if selfA._toStore("Innovation"):
3020         selfA.StoredVariables["Innovation"].store( d )
3021     if selfA._toStore("BMA"):
3022         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3023     if selfA._toStore("OMA"):
3024         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3025     if selfA._toStore("OMB"):
3026         selfA.StoredVariables["OMB"].store( d )
3027     if selfA._toStore("SigmaObs2"):
3028         TraceR = R.trace(Y.size)
3029         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
3030     if selfA._toStore("MahalanobisConsistency"):
3031         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3032     if selfA._toStore("SimulationQuantiles"):
3033         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3034     if selfA._toStore("SimulatedObservationAtBackground"):
3035         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3036     if selfA._toStore("SimulatedObservationAtOptimum"):
3037         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3038     #
3039     return 0
3040
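# The psas3dvar function above works in observation space: it minimizes the dual
# cost J(w) = 0.5 w^T (H B H^T + R) w - w^T d over the weights w, then maps back
# to state space with Xa = Xb + B H^T w. Since this dual cost is quadratic, a
# minimal illustrative sketch can replace the iterative minimizers used above by
# a direct linear solve (hypothetical toy matrices, not part of the ADAO API):

def _example_psas_dual_problem():
    "Illustrative sketch only: the observation-space (dual) problem of PSAS."
    xb = numpy.array([0., 0.])
    y  = numpy.array([1., 2., 3.])
    B  = numpy.eye(2) ; R = 0.1 * numpy.eye(3)
    H  = numpy.array([[1., 0.], [0., 1.], [1., 1.]])  # linearized observation operator
    d  = y - H @ xb                                   # innovation
    HBHTpR = H @ B @ H.T + R
    w  = numpy.linalg.solve(HBHTpR, d)                # minimizer of the quadratic dual cost
    xa = xb + B @ H.T @ w                             # back to state space
    return xa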
3041 # ==============================================================================
3042 def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"):
3043     """
3044     Stochastic EnKF
3045     """
3046     if selfA._parameters["EstimationOf"] == "Parameters":
3047         selfA._parameters["StoreInternalVariables"] = True
3048     #
3049     # Operators
3050     H = HO["Direct"].appliedControledFormTo
3051     #
3052     if selfA._parameters["EstimationOf"] == "State":
3053         M = EM["Direct"].appliedControledFormTo
3054     #
3055     if CM is not None and "Tangent" in CM and U is not None:
3056         Cm = CM["Tangent"].asMatrix(Xb)
3057     else:
3058         Cm = None
3059     #
3060     # Observation duration and sizes
3061     if hasattr(Y,"stepnumber"):
3062         duration = Y.stepnumber()
3063         __p = numpy.cumprod(Y.shape())[-1]
3064     else:
3065         duration = 2
3066         __p = numpy.array(Y).size
3067     #
3068     # Precompute the inverses of B and R
3069     if selfA._parameters["StoreInternalVariables"] \
3070         or selfA._toStore("CostFunctionJ") \
3071         or selfA._toStore("CostFunctionJb") \
3072         or selfA._toStore("CostFunctionJo") \
3073         or selfA._toStore("CurrentOptimum") \
3074         or selfA._toStore("APosterioriCovariance"):
3075         BI = B.getI()
3076         RI = R.getI()
3077     #
3078     __n = Xb.size
3079     __m = selfA._parameters["NumberOfMembers"]
3080     #
3081     if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
3082     else:                         Rn = R
3083     #
3084     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3085         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
3086         else:                         Pn = B
3087         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
3088         selfA.StoredVariables["Analysis"].store( Xb )
3089         if selfA._toStore("APosterioriCovariance"):
3090             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3091         selfA._setInternalState("seed", numpy.random.get_state())
3092     elif selfA._parameters["nextStep"]:
3093         Xn = selfA._getInternalState("Xn")
3094     #
3095     previousJMinimum = numpy.finfo(float).max
3096     #
3097     for step in range(duration-1):
3098         numpy.random.set_state(selfA._getInternalState("seed"))
3099         if hasattr(Y,"store"):
3100             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3101         else:
3102             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3103         #
3104         if U is not None:
3105             if hasattr(U,"store") and len(U)>1:
3106                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3107             elif hasattr(U,"store") and len(U)==1:
3108                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3109             else:
3110                 Un = numpy.asmatrix(numpy.ravel( U )).T
3111         else:
3112             Un = None
3113         #
3114         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
3115             Xn = CovarianceInflation( Xn,
3116                 selfA._parameters["InflationType"],
3117                 selfA._parameters["InflationFactor"],
3118                 )
3119         #
3120         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3121             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
3122                 argsAsSerie = True,
3123                 returnSerieAsArrayMatrix = True )
3124             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
3125             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
3126                 argsAsSerie = True,
3127                 returnSerieAsArrayMatrix = True )
3128             if Cm is not None and Un is not None: # Caution: if Cm is already included in M, it would be applied twice!
3129                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3130                 Xn_predicted = Xn_predicted + Cm * Un
3131         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3132             # --- > By principle, M = Id, Q = 0
3133             Xn_predicted = EMX = Xn
3134             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
3135                 argsAsSerie = True,
3136                 returnSerieAsArrayMatrix = True )
3137         #
3138         # Mean of forecast and observation of forecast
3139         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
3140         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
3141         #
3142         #--------------------------
3143         if VariantM == "KalmanFilterFormula05":
3144             PfHT, HPfHT = 0., 0.
3145             for i in range(__m):
3146                 Exfi = Xn_predicted[:,i].reshape((__n,1)) - Xfm
3147                 Eyfi = HX_predicted[:,i].reshape((__p,1)) - Hfm
3148                 PfHT  += Exfi * Eyfi.T
3149                 HPfHT += Eyfi * Eyfi.T
3150             PfHT  = (1./(__m-1)) * PfHT
3151             HPfHT = (1./(__m-1)) * HPfHT
3152             Kn     = PfHT * ( R + HPfHT ).I
3153             del PfHT, HPfHT
3154             #
3155             for i in range(__m):
3156                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
3157                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
3158         #--------------------------
3159         elif VariantM == "KalmanFilterFormula16":
3160             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
3161             EpYm  = EpY.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
3162             #
3163             EaX   = EnsembleOfAnomalies( Xn_predicted ) / math.sqrt(__m-1)
3164             EaY = (HX_predicted - Hfm - EpY + EpYm) / math.sqrt(__m-1)
3165             #
3166             Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
3167             #
3168             for i in range(__m):
3169                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
3170         #--------------------------
3171         else:
3172             raise ValueError("VariantM has to be chosen in the authorized methods list.")
3173         #
3174         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
3175             Xn = CovarianceInflation( Xn,
3176                 selfA._parameters["InflationType"],
3177                 selfA._parameters["InflationFactor"],
3178                 )
3179         #
3180         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
3181         #--------------------------
3182         selfA._setInternalState("Xn", Xn)
3183         selfA._setInternalState("seed", numpy.random.get_state())
3184         #--------------------------
3185         #
3186         if selfA._parameters["StoreInternalVariables"] \
3187             or selfA._toStore("CostFunctionJ") \
3188             or selfA._toStore("CostFunctionJb") \
3189             or selfA._toStore("CostFunctionJo") \
3190             or selfA._toStore("APosterioriCovariance") \
3191             or selfA._toStore("InnovationAtCurrentAnalysis") \
3192             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
3193             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3194             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
3195             _Innovation = Ynpu - _HXa
3196         #
3197         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3198         # ---> with the analysis
3199         selfA.StoredVariables["Analysis"].store( Xa )
3200         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3201             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
3202         if selfA._toStore("InnovationAtCurrentAnalysis"):
3203             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3204         # ---> with the current state
3205         if selfA._parameters["StoreInternalVariables"] \
3206             or selfA._toStore("CurrentState"):
3207             selfA.StoredVariables["CurrentState"].store( Xn )
3208         if selfA._toStore("ForecastState"):
3209             selfA.StoredVariables["ForecastState"].store( EMX )
3210         if selfA._toStore("ForecastCovariance"):
3211             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
3212         if selfA._toStore("BMA"):
3213             selfA.StoredVariables["BMA"].store( EMX - Xa )
3214         if selfA._toStore("InnovationAtCurrentState"):
3215             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
3216         if selfA._toStore("SimulatedObservationAtCurrentState") \
3217             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3218             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3219         # ---> others
3220         if selfA._parameters["StoreInternalVariables"] \
3221             or selfA._toStore("CostFunctionJ") \
3222             or selfA._toStore("CostFunctionJb") \
3223             or selfA._toStore("CostFunctionJo") \
3224             or selfA._toStore("CurrentOptimum") \
3225             or selfA._toStore("APosterioriCovariance"):
3226             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3227             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3228             J   = Jb + Jo
3229             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3230             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3231             selfA.StoredVariables["CostFunctionJ" ].store( J )
3232             #
3233             if selfA._toStore("IndexOfOptimum") \
3234                 or selfA._toStore("CurrentOptimum") \
3235                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3236                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3237                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3238                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3239                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3240             if selfA._toStore("IndexOfOptimum"):
3241                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3242             if selfA._toStore("CurrentOptimum"):
3243                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3244             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3245                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3246             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3247                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3248             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3249                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3250             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3251                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3252         if selfA._toStore("APosterioriCovariance"):
3253             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
3254         if selfA._parameters["EstimationOf"] == "Parameters" \
3255             and J < previousJMinimum:
3256             previousJMinimum    = J
3257             XaMin               = Xa
3258             if selfA._toStore("APosterioriCovariance"):
3259                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3260         # ---> For the smoothers
3261         if selfA._toStore("CurrentEnsembleState"):
3262             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
3263     #
3264     # Additional final storage of the optimum in parameter estimation
3265     # ----------------------------------------------------------------
3266     if selfA._parameters["EstimationOf"] == "Parameters":
3267         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3268         selfA.StoredVariables["Analysis"].store( XaMin )
3269         if selfA._toStore("APosterioriCovariance"):
3270             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3271         if selfA._toStore("BMA"):
3272             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3273     #
3274     return 0
3275
3276 # ==============================================================================
3277 def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3278     """
3279     3DVAR
3280     """
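    #
    # For reference, the functional minimized below (see CostFunction) is the
    # classical 3DVAR cost function:
    #   J(x) = 1/2 (x - xb)' B^{-1} (x - xb) + 1/2 (y - H(x))' R^{-1} (y - H(x))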
3281     #
3282     # Initializations
3283     # ---------------
3284     Hm = HO["Direct"].appliedTo
3285     Ha = HO["Adjoint"].appliedInXTo
3286     #
3287     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
3288         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
3289     else:
3290         HXb = Hm( Xb )
3291     HXb = HXb.reshape((-1,1))
3292     if Y.size != HXb.size:
3293         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different; they have to be identical."%(Y.size,HXb.size))
3294     if max(Y.shape) != max(HXb.shape):
3295         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different; they have to be identical."%(Y.shape,HXb.shape))
3296     #
3297     if selfA._toStore("JacobianMatrixAtBackground"):
3298         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
3299         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
3300         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
3301     #
3302     BI = B.getI()
3303     RI = R.getI()
3304     #
3305     Xini = selfA._parameters["InitializationPoint"]
3306     #
3307     # Definition of the cost function
3308     # -------------------------------
3309     def CostFunction(x):
3310         _X  = numpy.ravel( x ).reshape((-1,1))
3311         if selfA._parameters["StoreInternalVariables"] or \
3312             selfA._toStore("CurrentState") or \
3313             selfA._toStore("CurrentOptimum"):
3314             selfA.StoredVariables["CurrentState"].store( _X )
3315         _HX = Hm( _X ).reshape((-1,1))
3316         _Innovation = Y - _HX
3317         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3318             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3319             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3320         if selfA._toStore("InnovationAtCurrentState"):
3321             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3322         #
3323         Jb  = float( 0.5 * (_X - Xb).T * (BI * (_X - Xb)) )
3324         Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
3325         J   = Jb + Jo
3326         #
3327         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3328         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3329         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3330         selfA.StoredVariables["CostFunctionJ" ].store( J )
3331         if selfA._toStore("IndexOfOptimum") or \
3332             selfA._toStore("CurrentOptimum") or \
3333             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3334             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3335             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3336             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3337             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3338         if selfA._toStore("IndexOfOptimum"):
3339             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3340         if selfA._toStore("CurrentOptimum"):
3341             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3342         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3343             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3344         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3345             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3346         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3347             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3348         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3349             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3350         return J
3351     #
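    # Gradient of the cost function, using the adjoint of H as coded below:
    #   grad J(x) = B^{-1} (x - xb) - H^T R^{-1} (y - H(x))   (H^T: adjoint of H)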
3352     def GradientOfCostFunction(x):
3353         _X      = x.reshape((-1,1))
3354         _HX     = Hm( _X ).reshape((-1,1))
3355         GradJb  = BI * (_X - Xb)
3356         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3357         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3358         return GradJ
3359     #
3360     # Minimization of the functional
3361     # ------------------------------
3362     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3363     #
3364     if selfA._parameters["Minimizer"] == "LBFGSB":
3365         if "0.19" <= scipy.version.version <= "1.1.0":
3366             import lbfgsbhlt as optimiseur
3367         else:
3368             import scipy.optimize as optimiseur
3369         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3370             func        = CostFunction,
3371             x0          = Xini,
3372             fprime      = GradientOfCostFunction,
3373             args        = (),
3374             bounds      = selfA._parameters["Bounds"],
3375             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3376             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3377             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3378             iprint      = selfA._parameters["optiprint"],
3379             )
3380         nfeval = Informations['funcalls']
3381         rc     = Informations['warnflag']
3382     elif selfA._parameters["Minimizer"] == "TNC":
3383         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3384             func        = CostFunction,
3385             x0          = Xini,
3386             fprime      = GradientOfCostFunction,
3387             args        = (),
3388             bounds      = selfA._parameters["Bounds"],
3389             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3390             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3391             ftol        = selfA._parameters["CostDecrementTolerance"],
3392             messages    = selfA._parameters["optmessages"],
3393             )
3394     elif selfA._parameters["Minimizer"] == "CG":
3395         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3396             f           = CostFunction,
3397             x0          = Xini,
3398             fprime      = GradientOfCostFunction,
3399             args        = (),
3400             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3401             gtol        = selfA._parameters["GradientNormTolerance"],
3402             disp        = selfA._parameters["optdisp"],
3403             full_output = True,
3404             )
3405     elif selfA._parameters["Minimizer"] == "NCG":
3406         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3407             f           = CostFunction,
3408             x0          = Xini,
3409             fprime      = GradientOfCostFunction,
3410             args        = (),
3411             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3412             avextol     = selfA._parameters["CostDecrementTolerance"],
3413             disp        = selfA._parameters["optdisp"],
3414             full_output = True,
3415             )
3416     elif selfA._parameters["Minimizer"] == "BFGS":
3417         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3418             f           = CostFunction,
3419             x0          = Xini,
3420             fprime      = GradientOfCostFunction,
3421             args        = (),
3422             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3423             gtol        = selfA._parameters["GradientNormTolerance"],
3424             disp        = selfA._parameters["optdisp"],
3425             full_output = True,
3426             )
3427     else:
3428         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3429     #
3430     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3431     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3432     #
3433     # Workaround for a TNC bug on the returned Minimum
3434     # -------------------------------------------------
3435     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3436         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3437     #
3438     Xa = Minimum
3439     #--------------------------
3440     #
3441     selfA.StoredVariables["Analysis"].store( Xa )
3442     #
3443     if selfA._toStore("OMA") or \
3444         selfA._toStore("SigmaObs2") or \
3445         selfA._toStore("SimulationQuantiles") or \
3446         selfA._toStore("SimulatedObservationAtOptimum"):
3447         if selfA._toStore("SimulatedObservationAtCurrentState"):
3448             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3449         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3450             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3451         else:
3452             HXa = Hm( Xa )
3453     #
3454     if selfA._toStore("APosterioriCovariance") or \
3455         selfA._toStore("SimulationQuantiles") or \
3456         selfA._toStore("JacobianMatrixAtOptimum") or \
3457         selfA._toStore("KalmanGainAtOptimum"):
3458         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3459         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3460     if selfA._toStore("APosterioriCovariance") or \
3461         selfA._toStore("SimulationQuantiles") or \
3462         selfA._toStore("KalmanGainAtOptimum"):
3463         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3464         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
3465     if selfA._toStore("APosterioriCovariance") or \
3466         selfA._toStore("SimulationQuantiles"):
3467         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
3468     if selfA._toStore("APosterioriCovariance"):
3469         selfA.StoredVariables["APosterioriCovariance"].store( A )
3470     if selfA._toStore("JacobianMatrixAtOptimum"):
3471         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3472     if selfA._toStore("KalmanGainAtOptimum"):
3473         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3474         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3475         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3476     #
3477     # Additional computations and/or storage
3478     # ---------------------------------------
3479     if selfA._toStore("Innovation") or \
3480         selfA._toStore("SigmaObs2") or \
3481         selfA._toStore("MahalanobisConsistency") or \
3482         selfA._toStore("OMB"):
3483         d  = Y - HXb
3484     if selfA._toStore("Innovation"):
3485         selfA.StoredVariables["Innovation"].store( d )
3486     if selfA._toStore("BMA"):
3487         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3488     if selfA._toStore("OMA"):
3489         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3490     if selfA._toStore("OMB"):
3491         selfA.StoredVariables["OMB"].store( d )
3492     if selfA._toStore("SigmaObs2"):
3493         TraceR = R.trace(Y.size)
3494         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
3495     if selfA._toStore("MahalanobisConsistency"):
3496         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3497     if selfA._toStore("SimulationQuantiles"):
3498         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3499     if selfA._toStore("SimulatedObservationAtBackground"):
3500         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3501     if selfA._toStore("SimulatedObservationAtOptimum"):
3502         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3503     #
3504     return 0
3505
3506 # ==============================================================================
3507 def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3508     """
3509     4DVAR
3510     """
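    #
    # For reference, the functional minimized below is the strong-constraint
    # 4DVAR cost function, the state being propagated over the assimilation
    # window by the evolution model M:
    #   J(x) = 1/2 (x - xb)' B^{-1} (x - xb)
    #        + 1/2 sum_k (y_k - H(x_k))' R^{-1} (y_k - H(x_k)),  with x_{k+1} = M(x_k, u_k)
    # and its gradient is obtained by a backward sweep with the adjoints of H and M.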
3511     #
3512     # Initializations
3513     # ---------------
3514     #
3515     # Operators
3516     Hm = HO["Direct"].appliedControledFormTo
3517     Mm = EM["Direct"].appliedControledFormTo
3518     #
3519     if CM is not None and "Tangent" in CM and U is not None:
3520         Cm = CM["Tangent"].asMatrix(Xb)
3521     else:
3522         Cm = None
3523     #
3524     def Un(_step):
3525         if U is not None:
3526             if hasattr(U,"store") and 1<=_step<len(U) :
3527                 _Un = numpy.asmatrix(numpy.ravel( U[_step] )).T
3528             elif hasattr(U,"store") and len(U)==1:
3529                 _Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3530             else:
3531                 _Un = numpy.asmatrix(numpy.ravel( U )).T
3532         else:
3533             _Un = None
3534         return _Un
3535     def CmUn(_xn,_un):
3536         if Cm is not None and _un is not None: # Warning: if Cm is also included in M, it is counted twice!
3537             _Cm   = Cm.reshape(_xn.size,_un.size) # ADAO & check shape
3538             _CmUn = _Cm * _un
3539         else:
3540             _CmUn = 0.
3541         return _CmUn
3542     #
3543     # Note: the observations are used starting from time step number 1,
3544     # and are stored in Yo indexed accordingly. Step 0 is therefore not
3545     # used, since the first assimilation stage starts with the observation
3546     # of step 1.
3547     #
3548     # Number of steps equal to the number of observation steps
3549     if hasattr(Y,"stepnumber"):
3550         duration = Y.stepnumber()
3551     else:
3552         duration = 2
3553     #
3554     # Precompute the inverses of B and R
3555     BI = B.getI()
3556     RI = R.getI()
3557     #
3558     # Starting point of the optimization
3559     Xini = selfA._parameters["InitializationPoint"]
3560     #
3561     # Definition of the cost function
3562     # -------------------------------
3563     selfA.DirectCalculation = [None,] # Step 0 is not observed
3564     selfA.DirectInnovation  = [None,] # Step 0 is not observed
3565     def CostFunction(x):
3566         _X  = numpy.asmatrix(numpy.ravel( x )).T
3567         if selfA._parameters["StoreInternalVariables"] or \
3568             selfA._toStore("CurrentState") or \
3569             selfA._toStore("CurrentOptimum"):
3570             selfA.StoredVariables["CurrentState"].store( _X )
3571         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3572         selfA.DirectCalculation = [None,]
3573         selfA.DirectInnovation  = [None,]
3574         Jo  = 0.
3575         _Xn = _X
3576         for step in range(0,duration-1):
3577             if hasattr(Y,"store"):
3578                 _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
3579             else:
3580                 _Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
3581             _Un = Un(step)
3582             #
3583             # Evolution step
3584             if selfA._parameters["EstimationOf"] == "State":
3585                 _Xn = Mm( (_Xn, _Un) ) + CmUn(_Xn, _Un)
3586             elif selfA._parameters["EstimationOf"] == "Parameters":
3587                 pass
3588             #
3589             if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3590                 _Xn = ApplyBounds( _Xn, ForceNumericBounds(selfA._parameters["Bounds"]) )
3591             #
3592             # Difference-to-observations step
3593             if selfA._parameters["EstimationOf"] == "State":
3594                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
3595             elif selfA._parameters["EstimationOf"] == "Parameters":
3596                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
3597             #
3598             # State storage
3599             selfA.DirectCalculation.append( _Xn )
3600             selfA.DirectInnovation.append( _YmHMX )
3601             #
3602             # Add the contribution to the observation functional
3603             Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
3604         J = Jb + Jo
3605         #
3606         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3607         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3608         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3609         selfA.StoredVariables["CostFunctionJ" ].store( J )
3610         if selfA._toStore("IndexOfOptimum") or \
3611             selfA._toStore("CurrentOptimum") or \
3612             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3613             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3614             selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3615             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3616         if selfA._toStore("IndexOfOptimum"):
3617             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3618         if selfA._toStore("CurrentOptimum"):
3619             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3620         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3621             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3622         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3623             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3624         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3625             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3626         return J
3627     #
3628     def GradientOfCostFunction(x):
3629         _X      = numpy.asmatrix(numpy.ravel( x )).T
3630         GradJb  = BI * (_X - Xb)
3631         GradJo  = 0.
3632         for step in range(duration-1,0,-1):
3633             # Retrieve the last stored evolution state
3634             _Xn = selfA.DirectCalculation.pop()
3635             # Retrieve the last stored innovation
3636             _YmHMX = selfA.DirectInnovation.pop()
3637             # Compute the adjoints
3638             Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3639             Ha = Ha.reshape(_Xn.size,_YmHMX.size) # ADAO & check shape
3640             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3641             Ma = Ma.reshape(_Xn.size,_Xn.size) # ADAO & check shape
3642             # Gradient computation by the adjoint state
3643             GradJo = GradJo + Ha * (RI * _YmHMX) # For a linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
3644             GradJo = Ma * GradJo                 # For a linear Ma, equivalent to: Ma( (_Xn, GradJo) )
3645         GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
3646         return GradJ
3647     #
3648     # Minimization of the functional
3649     # ------------------------------
3650     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3651     #
3652     if selfA._parameters["Minimizer"] == "LBFGSB":
3653         if "0.19" <= scipy.version.version <= "1.1.0":
3654             import lbfgsbhlt as optimiseur
3655         else:
3656             import scipy.optimize as optimiseur
3657         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3658             func        = CostFunction,
3659             x0          = Xini,
3660             fprime      = GradientOfCostFunction,
3661             args        = (),
3662             bounds      = selfA._parameters["Bounds"],
3663             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3664             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3665             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3666             iprint      = selfA._parameters["optiprint"],
3667             )
3668         nfeval = Informations['funcalls']
3669         rc     = Informations['warnflag']
3670     elif selfA._parameters["Minimizer"] == "TNC":
3671         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3672             func        = CostFunction,
3673             x0          = Xini,
3674             fprime      = GradientOfCostFunction,
3675             args        = (),
3676             bounds      = selfA._parameters["Bounds"],
3677             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3678             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3679             ftol        = selfA._parameters["CostDecrementTolerance"],
3680             messages    = selfA._parameters["optmessages"],
3681             )
3682     elif selfA._parameters["Minimizer"] == "CG":
3683         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3684             f           = CostFunction,
3685             x0          = Xini,
3686             fprime      = GradientOfCostFunction,
3687             args        = (),
3688             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3689             gtol        = selfA._parameters["GradientNormTolerance"],
3690             disp        = selfA._parameters["optdisp"],
3691             full_output = True,
3692             )
3693     elif selfA._parameters["Minimizer"] == "NCG":
3694         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3695             f           = CostFunction,
3696             x0          = Xini,
3697             fprime      = GradientOfCostFunction,
3698             args        = (),
3699             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3700             avextol     = selfA._parameters["CostDecrementTolerance"],
3701             disp        = selfA._parameters["optdisp"],
3702             full_output = True,
3703             )
3704     elif selfA._parameters["Minimizer"] == "BFGS":
3705         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3706             f           = CostFunction,
3707             x0          = Xini,
3708             fprime      = GradientOfCostFunction,
3709             args        = (),
3710             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3711             gtol        = selfA._parameters["GradientNormTolerance"],
3712             disp        = selfA._parameters["optdisp"],
3713             full_output = True,
3714             )
3715     else:
3716         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3717     #
3718     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3719     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3720     #
3721     # Workaround for a TNC bug on the returned Minimum
3722     # -------------------------------------------------
3723     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3724         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3725     #
3726     # Retrieval of the analysis
3727     # -------------------------
3728     Xa = Minimum
3729     #
3730     selfA.StoredVariables["Analysis"].store( Xa )
3731     #
3732     # Additional computations and/or storage
3733     # ---------------------------------------
3734     if selfA._toStore("BMA"):
3735         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3736     #
3737     return 0
3738
3739 # ==============================================================================
3740 def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3741     """
3742     Standard Kalman Filter
3743     """
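    #
    # For reference, each assimilation step below applies the classical linear
    # Kalman filter recursion, as coded in the loop:
    #   forecast : Xf = M Xn (+ Cm Un),   Pf = M Pn M' + Q
    #   analysis : K  = Pf H' (H Pf H' + R)^{-1}
    #              Xa = Xf + K (y - H Xf), Pa = Pf - K H Pf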
3744     if selfA._parameters["EstimationOf"] == "Parameters":
3745         selfA._parameters["StoreInternalVariables"] = True
3746     #
3747     # Operators
3748     # ---------
3749     Ht = HO["Tangent"].asMatrix(Xb)
3750     Ha = HO["Adjoint"].asMatrix(Xb)
3751     #
3752     if selfA._parameters["EstimationOf"] == "State":
3753         Mt = EM["Tangent"].asMatrix(Xb)
3754         Ma = EM["Adjoint"].asMatrix(Xb)
3755     #
3756     if CM is not None and "Tangent" in CM and U is not None:
3757         Cm = CM["Tangent"].asMatrix(Xb)
3758     else:
3759         Cm = None
3760     #
3761     # Observation duration and sizes
3762     if hasattr(Y,"stepnumber"):
3763         duration = Y.stepnumber()
3764         __p = numpy.cumprod(Y.shape())[-1]
3765     else:
3766         duration = 2
3767         __p = numpy.array(Y).size
3768     #
3769     # Precompute the inverses of B and R
3770     if selfA._parameters["StoreInternalVariables"] \
3771         or selfA._toStore("CostFunctionJ") \
3772         or selfA._toStore("CostFunctionJb") \
3773         or selfA._toStore("CostFunctionJo") \
3774         or selfA._toStore("CurrentOptimum") \
3775         or selfA._toStore("APosterioriCovariance"):
3776         BI = B.getI()
3777         RI = R.getI()
3778     #
3779     __n = Xb.size
3780     #
3781     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3782         Xn = Xb
3783         Pn = B
3784         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3785         selfA.StoredVariables["Analysis"].store( Xb )
3786         if selfA._toStore("APosterioriCovariance"):
3787             if hasattr(B,"asfullmatrix"):
3788                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
3789             else:
3790                 selfA.StoredVariables["APosterioriCovariance"].store( B )
3791         selfA._setInternalState("seed", numpy.random.get_state())
3792     elif selfA._parameters["nextStep"]:
3793         Xn = selfA._getInternalState("Xn")
3794         Pn = selfA._getInternalState("Pn")
3795     #
3796     if selfA._parameters["EstimationOf"] == "Parameters":
3797         XaMin            = Xn
3798         previousJMinimum = numpy.finfo(float).max
3799     #
3800     for step in range(duration-1):
3801         if hasattr(Y,"store"):
3802             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3803         else:
3804             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3805         #
3806         if U is not None:
3807             if hasattr(U,"store") and len(U)>1:
3808                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3809             elif hasattr(U,"store") and len(U)==1:
3810                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3811             else:
3812                 Un = numpy.asmatrix(numpy.ravel( U )).T
3813         else:
3814             Un = None
3815         #
3816         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3817             Xn_predicted = Mt * Xn
3818             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3819                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3820                 Xn_predicted = Xn_predicted + Cm * Un
3821             Pn_predicted = Q + Mt * (Pn * Ma)
3822         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3823             # --- > By principle, M = Id, Q = 0
3824             Xn_predicted = Xn
3825             Pn_predicted = Pn
3826         #
3827         if selfA._parameters["EstimationOf"] == "State":
3828             HX_predicted = Ht * Xn_predicted
3829             _Innovation  = Ynpu - HX_predicted
3830         elif selfA._parameters["EstimationOf"] == "Parameters":
3831             HX_predicted = Ht * Xn_predicted
3832             _Innovation  = Ynpu - HX_predicted
3833             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3834                 _Innovation = _Innovation - Cm * Un
3835         #
3836         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
3837         Xn = Xn_predicted + Kn * _Innovation
3838         Pn = Pn_predicted - Kn * Ht * Pn_predicted
3839         #
3840         Xa = Xn # Pointers
3841         #--------------------------
3842         selfA._setInternalState("Xn", Xn)
3843         selfA._setInternalState("Pn", Pn)
3844         #--------------------------
3845         #
3846         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3847         # ---> with the analysis
3848         selfA.StoredVariables["Analysis"].store( Xa )
3849         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3850             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
3851         if selfA._toStore("InnovationAtCurrentAnalysis"):
3852             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3853         # ---> with the current state
3854         if selfA._parameters["StoreInternalVariables"] \
3855             or selfA._toStore("CurrentState"):
3856             selfA.StoredVariables["CurrentState"].store( Xn )
3857         if selfA._toStore("ForecastState"):
3858             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
3859         if selfA._toStore("ForecastCovariance"):
3860             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
3861         if selfA._toStore("BMA"):
3862             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
3863         if selfA._toStore("InnovationAtCurrentState"):
3864             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3865         if selfA._toStore("SimulatedObservationAtCurrentState") \
3866             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3867             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3868         # ---> others
3869         if selfA._parameters["StoreInternalVariables"] \
3870             or selfA._toStore("CostFunctionJ") \
3871             or selfA._toStore("CostFunctionJb") \
3872             or selfA._toStore("CostFunctionJo") \
3873             or selfA._toStore("CurrentOptimum") \
3874             or selfA._toStore("APosterioriCovariance"):
3875             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3876             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3877             J   = Jb + Jo
3878             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3879             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3880             selfA.StoredVariables["CostFunctionJ" ].store( J )
3881             #
3882             if selfA._toStore("IndexOfOptimum") \
3883                 or selfA._toStore("CurrentOptimum") \
3884                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3885                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3886                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3887                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3888                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3889             if selfA._toStore("IndexOfOptimum"):
3890                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3891             if selfA._toStore("CurrentOptimum"):
3892                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3893             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3894                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3895             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3896                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3897             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3898                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3899             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3900                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3901         if selfA._toStore("APosterioriCovariance"):
3902             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3903         if selfA._parameters["EstimationOf"] == "Parameters" \
3904             and J < previousJMinimum:
3905             previousJMinimum    = J
3906             XaMin               = Xa
3907             if selfA._toStore("APosterioriCovariance"):
3908                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3909     #
3910     # Additional final storage of the optimum in parameter estimation
3911     # ----------------------------------------------------------------
3912     if selfA._parameters["EstimationOf"] == "Parameters":
3913         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3914         selfA.StoredVariables["Analysis"].store( XaMin )
3915         if selfA._toStore("APosterioriCovariance"):
3916             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3917         if selfA._toStore("BMA"):
3918             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3919     #
3920     return 0
3921
3922 # ==============================================================================
3923 def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3924     """
3925     Unscented Kalman Filter
3926     """
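    #
    # For reference, the scaled unscented transform used below builds 2L+1
    # sigma points with the scaling Lambda = Alpha^2 (L + Kappa) - L and
    # Gamma = sqrt(L + Lambda); the mean/covariance weights are
    #   Wm[0] = Lambda/(L+Lambda), Wc[0] = Wm[0] + (1 - Alpha^2 + Beta),
    #   Wm[i] = Wc[i] = 1/(2 (L+Lambda)) for i > 0.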
3927     if selfA._parameters["EstimationOf"] == "Parameters":
3928         selfA._parameters["StoreInternalVariables"] = True
3929     #
3930     L     = Xb.size
3931     Alpha = selfA._parameters["Alpha"]
3932     Beta  = selfA._parameters["Beta"]
3933     if selfA._parameters["Kappa"] == 0:
3934         if selfA._parameters["EstimationOf"] == "State":
3935             Kappa = 0
3936         elif selfA._parameters["EstimationOf"] == "Parameters":
3937             Kappa = 3 - L
3938     else:
3939         Kappa = selfA._parameters["Kappa"]
3940     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3941     Gamma  = math.sqrt( L + Lambda )
3942     #
3943     Ww = []
3944     Ww.append( 0. )
3945     for i in range(2*L):
3946         Ww.append( 1. / (2.*(L + Lambda)) )
3947     #
3948     Wm = numpy.array( Ww )
3949     Wm[0] = Lambda / (L + Lambda)
3950     Wc = numpy.array( Ww )
3951     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
3952     #
3953     # Operators
3954     Hm = HO["Direct"].appliedControledFormTo
3955     #
3956     if selfA._parameters["EstimationOf"] == "State":
3957         Mm = EM["Direct"].appliedControledFormTo
3958     #
3959     if CM is not None and "Tangent" in CM and U is not None:
3960         Cm = CM["Tangent"].asMatrix(Xb)
3961     else:
3962         Cm = None
3963     #
3964     # Observation duration and sizes
3965     if hasattr(Y,"stepnumber"):
3966         duration = Y.stepnumber()
3967         __p = numpy.cumprod(Y.shape())[-1]
3968     else:
3969         duration = 2
3970         __p = numpy.array(Y).size
3971     #
3972     # Precompute the inverses of B and R
3973     if selfA._parameters["StoreInternalVariables"] \
3974         or selfA._toStore("CostFunctionJ") \
3975         or selfA._toStore("CostFunctionJb") \
3976         or selfA._toStore("CostFunctionJo") \
3977         or selfA._toStore("CurrentOptimum") \
3978         or selfA._toStore("APosterioriCovariance"):
3979         BI = B.getI()
3980         RI = R.getI()
3981     #
3982     __n = Xb.size
3983     #
3984     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3985         Xn = Xb
3986         if hasattr(B,"asfullmatrix"):
3987             Pn = B.asfullmatrix(__n)
3988         else:
3989             Pn = B
3990         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3991         selfA.StoredVariables["Analysis"].store( Xb )
3992         if selfA._toStore("APosterioriCovariance"):
3993             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3994     elif selfA._parameters["nextStep"]:
3995         Xn = selfA._getInternalState("Xn")
3996         Pn = selfA._getInternalState("Pn")
3997     #
3998     if selfA._parameters["EstimationOf"] == "Parameters":
3999         XaMin            = Xn
4000         previousJMinimum = numpy.finfo(float).max
4001     #
4002     for step in range(duration-1):
4003         if hasattr(Y,"store"):
4004             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
4005         else:
4006             Ynpu = numpy.ravel( Y ).reshape((__p,1))
4007         #
4008         if U is not None:
4009             if hasattr(U,"store") and len(U)>1:
4010                 Un = numpy.ravel( U[step] ).reshape((-1,1))
4011             elif hasattr(U,"store") and len(U)==1:
4012                 Un = numpy.ravel( U[0] ).reshape((-1,1))
4013             else:
4014                 Un = numpy.ravel( U ).reshape((-1,1))
4015         else:
4016             Un = None
4017         #
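        # Sigma points around the current analysis: the mean Xn and the
        # columns Xn +/- Gamma * sqrt(Pn)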
4018         Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
4019         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
4020         nbSpts = 2*Xn.size+1
4021         #
4022         XEtnnp = []
4023         for point in range(nbSpts):
4024             if selfA._parameters["EstimationOf"] == "State":
4025                 XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1))
4026                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
4027                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
4028                     XEtnnpi = XEtnnpi + Cm * Un
4029             elif selfA._parameters["EstimationOf"] == "Parameters":
4030                 # --- > By principle, M = Id, Q = 0
4031                 XEtnnpi = Xnp[:,point]
4032             XEtnnp.append( numpy.ravel(XEtnnpi).reshape((-1,1)) )
4033         XEtnnp = numpy.concatenate( XEtnnp, axis=1 )
4034         #
4035         Xncm = ( XEtnnp * Wm ).sum(axis=1)
4036         #
4037         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
4038         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
4039         for point in range(nbSpts):
4040             Pnm += Wc[point] * ((XEtnnp[:,point]-Xncm).reshape((-1,1)) * (XEtnnp[:,point]-Xncm))
4041         #
4042         Pnmdemi = numpy.real(scipy.linalg.sqrtm(Pnm))
4043         #
4044         Xnnp = numpy.hstack([Xncm.reshape((-1,1)), Xncm.reshape((-1,1))+Gamma*Pnmdemi, Xncm.reshape((-1,1))-Gamma*Pnmdemi])
4045         #
4046         Ynnp = []
4047         for point in range(nbSpts):
4048             if selfA._parameters["EstimationOf"] == "State":
4049                 Ynnpi = Hm( (Xnnp[:,point], None) )
4050             elif selfA._parameters["EstimationOf"] == "Parameters":
4051                 Ynnpi = Hm( (Xnnp[:,point], Un) )
4052             Ynnp.append( numpy.ravel(Ynnpi).reshape((-1,1)) )
4053         Ynnp = numpy.concatenate( Ynnp, axis=1 )
4054         #
4055         Yncm = ( Ynnp * Wm ).sum(axis=1)
4056         #
4057         Pyyn = R
4058         Pxyn = 0.
4059         for point in range(nbSpts):
4060             Pyyn += Wc[point] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
4061             Pxyn += Wc[point] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
4062         #
4063         _Innovation  = Ynpu - Yncm.reshape((-1,1))
4064         if selfA._parameters["EstimationOf"] == "Parameters":
4065             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
4066                 _Innovation = _Innovation - Cm * Un
4067         #
4068         Kn = Pxyn * Pyyn.I
4069         Xn = Xncm.reshape((-1,1)) + Kn * _Innovation
4070         Pn = Pnm - Kn * Pyyn * Kn.T
4071         #
4072         Xa = Xn # Pointers
4073         #--------------------------
4074         selfA._setInternalState("Xn", Xn)
4075         selfA._setInternalState("Pn", Pn)
4076         #--------------------------
4077         #
4078         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4079         # ---> with the analysis
4080         selfA.StoredVariables["Analysis"].store( Xa )
4081         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
4082             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
4083         if selfA._toStore("InnovationAtCurrentAnalysis"):
4084             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
4085         # ---> with the current state
4086         if selfA._parameters["StoreInternalVariables"] \
4087             or selfA._toStore("CurrentState"):
4088             selfA.StoredVariables["CurrentState"].store( Xn )
4089         if selfA._toStore("ForecastState"):
4090             selfA.StoredVariables["ForecastState"].store( Xncm )
4091         if selfA._toStore("ForecastCovariance"):
4092             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
4093         if selfA._toStore("BMA"):
4094             selfA.StoredVariables["BMA"].store( Xncm - Xa )
4095         if selfA._toStore("InnovationAtCurrentState"):
4096             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
4097         if selfA._toStore("SimulatedObservationAtCurrentState") \
4098             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4099             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
4100         # ---> others
4101         if selfA._parameters["StoreInternalVariables"] \
4102             or selfA._toStore("CostFunctionJ") \
4103             or selfA._toStore("CostFunctionJb") \
4104             or selfA._toStore("CostFunctionJo") \
4105             or selfA._toStore("CurrentOptimum") \
4106             or selfA._toStore("APosterioriCovariance"):
4107             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
4108             Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
4109             J   = Jb + Jo
4110             selfA.StoredVariables["CostFunctionJb"].store( Jb )
4111             selfA.StoredVariables["CostFunctionJo"].store( Jo )
4112             selfA.StoredVariables["CostFunctionJ" ].store( J )
4113             #
4114             if selfA._toStore("IndexOfOptimum") \
4115                 or selfA._toStore("CurrentOptimum") \
4116                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
4117                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
4118                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
4119                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4120                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4121             if selfA._toStore("IndexOfOptimum"):
4122                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
4123             if selfA._toStore("CurrentOptimum"):
4124                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
4125             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4126                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
4127             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
4128                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
4129             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
4130                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
4131             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
4132                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
4133         if selfA._toStore("APosterioriCovariance"):
4134             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
4135         if selfA._parameters["EstimationOf"] == "Parameters" \
4136             and J < previousJMinimum:
4137             previousJMinimum    = J
4138             XaMin               = Xa
4139             if selfA._toStore("APosterioriCovariance"):
4140                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
4141     #
4142     # Additional final storage of the optimum in parameter estimation
4143     # ----------------------------------------------------------------
4144     if selfA._parameters["EstimationOf"] == "Parameters":
4145         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4146         selfA.StoredVariables["Analysis"].store( XaMin )
4147         if selfA._toStore("APosterioriCovariance"):
4148             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
4149         if selfA._toStore("BMA"):
4150             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
4151     #
4152     return 0
4153
4154 # ==============================================================================
4155 def van3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
4156     """
4157     3DVAR variational analysis with no inversion of B
4158     """
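    #
    # For reference, this variant works on the control increment v with the
    # change of variable x = xb + B v, so that the background term becomes
    # Jb(v) = 1/2 v' B v (B being symmetric) and no explicit inverse of B is
    # required in the minimization.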
4159     #
4160     # Initializations
4161     # ---------------
4162     Hm = HO["Direct"].appliedTo
4163     Ha = HO["Adjoint"].appliedInXTo
4164     #
4165     BT = B.getT()
4166     RI = R.getI()
4167     #
4168     Xini = numpy.zeros(Xb.shape)
4169     #
4170     # Definition of the cost function
4171     # -------------------------------
4172     def CostFunction(v):
4173         _V = numpy.asmatrix(numpy.ravel( v )).T
4174         _X = Xb + B * _V
4175         if selfA._parameters["StoreInternalVariables"] or \
4176             selfA._toStore("CurrentState") or \
4177             selfA._toStore("CurrentOptimum"):
4178             selfA.StoredVariables["CurrentState"].store( _X )
4179         _HX = Hm( _X )
4180         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
4181         _Innovation = Y - _HX
4182         if selfA._toStore("SimulatedObservationAtCurrentState") or \
4183             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4184             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
4185         if selfA._toStore("InnovationAtCurrentState"):
4186             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
4187         #
4188         Jb  = float( 0.5 * _V.T * BT * _V )
4189         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
4190         J   = Jb + Jo
4191         #
4192         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
4193         selfA.StoredVariables["CostFunctionJb"].store( Jb )
4194         selfA.StoredVariables["CostFunctionJo"].store( Jo )
4195         selfA.StoredVariables["CostFunctionJ" ].store( J )
4196         if selfA._toStore("IndexOfOptimum") or \
4197             selfA._toStore("CurrentOptimum") or \
4198             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
4199             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
4200             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
4201             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4202             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4203         if selfA._toStore("IndexOfOptimum"):
4204             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
4205         if selfA._toStore("CurrentOptimum"):
4206             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
4207         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4208             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
4209         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
4210             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
4211         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
4212             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
4213         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
4214             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
4215         return J
4216     #
4217     def GradientOfCostFunction(v):
4218         _V = v.reshape((-1,1))
4219         _X = Xb + (B @ _V).reshape((-1,1))
4220         _HX     = Hm( _X ).reshape((-1,1))
4221         GradJb  = BT * _V
4222         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
4223         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
4224         return GradJ
4225     #
4226     # Minimization of the functional
4227     # ------------------------------
4228     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
4229     #
4230     if selfA._parameters["Minimizer"] == "LBFGSB":
4231         if "0.19" <= scipy.version.version <= "1.1.0":
4232             import lbfgsbhlt as optimiseur
4233         else:
4234             import scipy.optimize as optimiseur
4235         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
4236             func        = CostFunction,
4237             x0          = Xini,
4238             fprime      = GradientOfCostFunction,
4239             args        = (),
4240             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4241             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
4242             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
4243             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4244             iprint      = selfA._parameters["optiprint"],
4245             )
4246         nfeval = Informations['funcalls']
4247         rc     = Informations['warnflag']
4248     elif selfA._parameters["Minimizer"] == "TNC":
4249         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
4250             func        = CostFunction,
4251             x0          = Xini,
4252             fprime      = GradientOfCostFunction,
4253             args        = (),
4254             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4255             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
4256             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4257             ftol        = selfA._parameters["CostDecrementTolerance"],
4258             messages    = selfA._parameters["optmessages"],
4259             )
4260     elif selfA._parameters["Minimizer"] == "CG":
4261         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
4262             f           = CostFunction,
4263             x0          = Xini,
4264             fprime      = GradientOfCostFunction,
4265             args        = (),
4266             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4267             gtol        = selfA._parameters["GradientNormTolerance"],
4268             disp        = selfA._parameters["optdisp"],
4269             full_output = True,
4270             )
4271     elif selfA._parameters["Minimizer"] == "NCG":
4272         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
4273             f           = CostFunction,
4274             x0          = Xini,
4275             fprime      = GradientOfCostFunction,
4276             args        = (),
4277             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4278             avextol     = selfA._parameters["CostDecrementTolerance"],
4279             disp        = selfA._parameters["optdisp"],
4280             full_output = True,
4281             )
4282     elif selfA._parameters["Minimizer"] == "BFGS":
4283         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
4284             f           = CostFunction,
4285             x0          = Xini,
4286             fprime      = GradientOfCostFunction,
4287             args        = (),
4288             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4289             gtol        = selfA._parameters["GradientNormTolerance"],
4290             disp        = selfA._parameters["optdisp"],
4291             full_output = True,
4292             )
4293     else:
4294         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
4295     #
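         # Index of the best iterate among the cost evaluations added by this
         # minimization (earlier stored steps are skipped via nbPreviousSteps).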
4296     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4297     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
4298     #
4299     # Correction to work around a TNC bug in the returned Minimum
4300     # ------------------------------------------------------------
4301     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
4302         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
4303     else:
4304         Minimum = Xb + B * Minimum.reshape((-1,1)) # Not @
4305     #
4306     Xa = Minimum
4307     #--------------------------
4308     #
4309     selfA.StoredVariables["Analysis"].store( Xa )
4310     #
4311     if selfA._toStore("OMA") or \
4312         selfA._toStore("SigmaObs2") or \
4313         selfA._toStore("SimulationQuantiles") or \
4314         selfA._toStore("SimulatedObservationAtOptimum"):
4315         if selfA._toStore("SimulatedObservationAtCurrentState"):
4316             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
4317         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4318             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
4319         else:
4320             HXa = Hm( Xa )
4321     #
4322     if selfA._toStore("APosterioriCovariance") or \
4323         selfA._toStore("SimulationQuantiles") or \
4324         selfA._toStore("JacobianMatrixAtOptimum") or \
4325         selfA._toStore("KalmanGainAtOptimum"):
4326         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
4327         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
4328     if selfA._toStore("APosterioriCovariance") or \
4329         selfA._toStore("SimulationQuantiles") or \
4330         selfA._toStore("KalmanGainAtOptimum"):
4331         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
4332         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
4333     if selfA._toStore("APosterioriCovariance") or \
4334         selfA._toStore("SimulationQuantiles"):
4335         BI = B.getI()
4336         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
4337     if selfA._toStore("APosterioriCovariance"):
4338         selfA.StoredVariables["APosterioriCovariance"].store( A )
4339     if selfA._toStore("JacobianMatrixAtOptimum"):
4340         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
4341     if selfA._toStore("KalmanGainAtOptimum"):
4342         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
4343         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
4344         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
4345     #
4346     # Additional calculations and/or storage
4347     # ---------------------------------------
4348     if selfA._toStore("Innovation") or \
4349         selfA._toStore("SigmaObs2") or \
4350         selfA._toStore("MahalanobisConsistency") or \
4351         selfA._toStore("OMB"):
4352         d  = Y - HXb
4353     if selfA._toStore("Innovation"):
4354         selfA.StoredVariables["Innovation"].store( d )
4355     if selfA._toStore("BMA"):
4356         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
4357     if selfA._toStore("OMA"):
4358         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
4359     if selfA._toStore("OMB"):
4360         selfA.StoredVariables["OMB"].store( d )
4361     if selfA._toStore("SigmaObs2"):
4362         TraceR = R.trace(Y.size)
4363         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
4364     if selfA._toStore("MahalanobisConsistency"):
4365         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
4366     if selfA._toStore("SimulationQuantiles"):
4367         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
4368     if selfA._toStore("SimulatedObservationAtBackground"):
4369         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
4370     if selfA._toStore("SimulatedObservationAtOptimum"):
4371         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
4372     #
4373     return 0
4374
4375 # ==============================================================================
4376 if __name__ == "__main__":
4377     print('\n AUTODIAGNOSTIC\n')