src/daComposant/daCore/NumericObjects.py (modules/adao.git)
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 __doc__ = """
24     Defines the generic numerical objects.
25 """
26 __author__ = "Jean-Philippe ARGAUD"
27
28 import os, time, copy, types, sys, logging
29 import math, numpy, scipy, scipy.optimize, scipy.version
30 from daCore.BasicObjects import Operator
31 from daCore.PlatformInfo import PlatformInfo
32 mpr = PlatformInfo().MachinePrecision()
33 mfp = PlatformInfo().MaximumPrecision()
34 # logging.getLogger().setLevel(logging.DEBUG)
35
36 # ==============================================================================
37 def ExecuteFunction( triplet ):
38     assert len(triplet) == 3, "Incorrect number of arguments"
39     X, xArgs, funcrepr = triplet
40     __X = numpy.asmatrix(numpy.ravel( X )).T
41     __sys_path_tmp = sys.path ; sys.path.insert(0,funcrepr["__userFunction__path"])
42     __module = __import__(funcrepr["__userFunction__modl"], globals(), locals(), [])
43     __fonction = getattr(__module,funcrepr["__userFunction__name"])
44     sys.path = __sys_path_tmp ; del __sys_path_tmp
45     if isinstance(xArgs, dict):
46         __HX  = __fonction( __X, **xArgs )
47     else:
48         __HX  = __fonction( __X )
49     return numpy.ravel( __HX )
50
51 # ==============================================================================
52 class FDApproximation(object):
53     """
54     This class serves as an interface to define approximated operators. When an
55     object is created by providing a function "Function", one obtains an object
56     with the 3 methods "DirectOperator", "TangentOperator" and "AdjointOperator".
57     The FD approximation is controlled by the multiplicative increment
58     "increment", worth 1% by default, or by the fixed increment "dX" which will
59     be multiplied by "increment" (hence in %), and centered finite differences
60     are used if the boolean "centeredDF" is true.
61     """
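    # Editor's illustrative usage sketch (not part of the original code), relying
    # only on the public methods defined below; "simulator" is a hypothetical
    # user-supplied function:
    #
    #     def simulator(x):
    #         x = numpy.ravel(x)
    #         return numpy.array([x[0]**2, x[0]*x[1]])
    #     FDA = FDApproximation(Function=simulator, centeredDF=True, increment=0.01)
    #     HX  = FDA.DirectOperator([1., 2.])               # direct value H(X)
    #     J   = FDA.TangentOperator(([1., 2.], None))      # full approximate Jacobian at X
    #     JdX = FDA.TangentOperator(([1., 2.], [1., 0.]))  # J applied to dX
    #     JtY = FDA.AdjointOperator(([1., 2.], [1., 1.]))  # J.T applied to Y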
62     def __init__(self,
63             name                  = "FDApproximation",
64             Function              = None,
65             centeredDF            = False,
66             increment             = 0.01,
67             dX                    = None,
68             extraArguments        = None,
69             avoidingRedundancy    = True,
70             toleranceInRedundancy = 1.e-18,
71             lenghtOfRedundancy    = -1,
72             mpEnabled             = False,
73             mpWorkers             = None,
74             mfEnabled             = False,
75             ):
76         self.__name = str(name)
77         self.__extraArgs = extraArguments
78         if mpEnabled:
79             try:
80                 import multiprocessing
81                 self.__mpEnabled = True
82             except ImportError:
83                 self.__mpEnabled = False
84         else:
85             self.__mpEnabled = False
86         self.__mpWorkers = mpWorkers
87         if self.__mpWorkers is not None and self.__mpWorkers < 1:
88             self.__mpWorkers = None
89         logging.debug("FDA Calculs en multiprocessing : %s (nombre de processus : %s)"%(self.__mpEnabled,self.__mpWorkers))
90         #
91         if mfEnabled:
92             self.__mfEnabled = True
93         else:
94             self.__mfEnabled = False
95         logging.debug("FDA Calculs en multifonctions : %s"%(self.__mfEnabled,))
96         #
97         if avoidingRedundancy:
98             self.__avoidRC = True
99             self.__tolerBP = float(toleranceInRedundancy)
100             self.__lenghtRJ = int(lenghtOfRedundancy)
101             self.__listJPCP = [] # Jacobian Previous Calculated Points
102             self.__listJPCI = [] # Jacobian Previous Calculated Increment
103             self.__listJPCR = [] # Jacobian Previous Calculated Results
104             self.__listJPPN = [] # Jacobian Previous Calculated Point Norms
105             self.__listJPIN = [] # Jacobian Previous Calculated Increment Norms
106         else:
107             self.__avoidRC = False
108         #
109         if self.__mpEnabled:
110             if isinstance(Function,types.FunctionType):
111                 logging.debug("FDA Calculs en multiprocessing : FunctionType")
112                 self.__userFunction__name = Function.__name__
113                 try:
114                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
115                 except:
116                     mod = os.path.abspath(Function.__globals__['__file__'])
117                 if not os.path.isfile(mod):
118                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
119                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
120                 self.__userFunction__path = os.path.dirname(mod)
121                 del mod
122                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
123                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
124             elif isinstance(Function,types.MethodType):
125                 logging.debug("FDA Calculs en multiprocessing : MethodType")
126                 self.__userFunction__name = Function.__name__
127                 try:
128                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
129                 except:
130                     mod = os.path.abspath(Function.__func__.__globals__['__file__'])
131                 if not os.path.isfile(mod):
132                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
133                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
134                 self.__userFunction__path = os.path.dirname(mod)
135                 del mod
136                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
137                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
138             else:
139                 raise TypeError("User defined function or method has to be provided for finite differences approximation.")
140         else:
141             self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
142             self.__userFunction = self.__userOperator.appliedTo
143         #
144         self.__centeredDF = bool(centeredDF)
145         if abs(float(increment)) > 1.e-15:
146             self.__increment  = float(increment)
147         else:
148             self.__increment  = 0.01
149         if dX is None:
150             self.__dX     = None
151         else:
152             self.__dX     = numpy.asmatrix(numpy.ravel( dX )).T
153         logging.debug("FDA Reduction des doublons de calcul : %s"%self.__avoidRC)
154         if self.__avoidRC:
155             logging.debug("FDA Tolerance de determination des doublons : %.2e"%self.__tolerBP)
156
157     # ---------------------------------------------------------
158     def __doublon__(self, e, l, n, v=None):
159         __ac, __iac = False, -1
160         for i in range(len(l)-1,-1,-1):
161             if numpy.linalg.norm(e - l[i]) < self.__tolerBP * n[i]:
162                 __ac, __iac = True, i
163                 if v is not None: logging.debug("FDA Cas%s déja calculé, récupération du doublon %i"%(v,__iac))
164                 break
165         return __ac, __iac
166
167     # ---------------------------------------------------------
168     def DirectOperator(self, X, **extraArgs ):
169         """
170         Direct computation using the provided function.
171
172         NB: the extraArgs are there to ensure call compatibility, but they must
173         not be passed here to the user function.
174         """
175         logging.debug("FDA Calcul DirectOperator (explicite)")
176         if self.__mfEnabled:
177             _HX = self.__userFunction( X, argsAsSerie = True )
178         else:
179             _X = numpy.asmatrix(numpy.ravel( X )).T
180             _HX = numpy.ravel(self.__userFunction( _X ))
181         #
182         return _HX
183
184     # ---------------------------------------------------------
185     def TangentMatrix(self, X ):
186         """
187         Computation of the tangent operator as the Jacobian by finite
188         differences, that is, the gradient of H at X. Directional finite
189         differences are used around the point X. X is a numpy.matrix.
190
191         Centered finite differences (2nd order approximation):
192         1/ For each component i of X, the perturbation dX[i] is added to and
193            subtracted from the component X[i] to build X_plus_dXi and X_moins_dXi,
194            and the responses HX_plus_dXi = H( X_plus_dXi ) and HX_moins_dXi =
195            H( X_moins_dXi ) are computed
196         2/ The differences (HX_plus_dXi-HX_moins_dXi) are formed and divided by
197            the step 2*dXi
198         3/ Each result, component by component, becomes a column of the Jacobian
199
200         Non-centered finite differences (1st order approximation):
201         1/ For each component i of X, the perturbation dX[i] is added to the
202            component X[i] to build X_plus_dXi, and the response
203            HX_plus_dXi = H( X_plus_dXi ) is computed
204         2/ The central value HX = H(X) is computed
205         3/ The differences (HX_plus_dXi-HX) are formed and divided by
206            the step dXi
207         4/ Each result, component by component, becomes a column of the Jacobian
208
209         """
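        # Editor's note (not part of the original code), the same schemes in
        # compact form, with e_i the i-th canonical basis vector:
        #   centered     : J[:,i] ~ ( H(X + dX[i]*e_i) - H(X - dX[i]*e_i) ) / (2*dX[i])
        #   non-centered : J[:,i] ~ ( H(X + dX[i]*e_i) - H(X) ) / dX[i]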
210         logging.debug("FDA Début du calcul de la Jacobienne")
211         logging.debug("FDA   Incrément de............: %s*X"%float(self.__increment))
212         logging.debug("FDA   Approximation centrée...: %s"%(self.__centeredDF))
213         #
214         if X is None or len(X)==0:
215             raise ValueError("Nominal point X for approximate derivatives can not be None or void (given X: %s)."%(str(X),))
216         #
217         _X = numpy.asmatrix(numpy.ravel( X )).T
218         #
219         if self.__dX is None:
220             _dX  = self.__increment * _X
221         else:
222             _dX = numpy.asmatrix(numpy.ravel( self.__dX )).T
223         #
224         if (_dX == 0.).any():
225             moyenne = _dX.mean()
226             if moyenne == 0.:
227                 _dX = numpy.where( _dX == 0., float(self.__increment), _dX )
228             else:
229                 _dX = numpy.where( _dX == 0., moyenne, _dX )
230         #
231         __alreadyCalculated  = False
232         if self.__avoidRC:
233             __bidon, __alreadyCalculatedP = self.__doublon__(_X,  self.__listJPCP, self.__listJPPN, None)
234             __bidon, __alreadyCalculatedI = self.__doublon__(_dX, self.__listJPCI, self.__listJPIN, None)
235             if __alreadyCalculatedP == __alreadyCalculatedI > -1:
236                 __alreadyCalculated, __i = True, __alreadyCalculatedP
237                 logging.debug("FDA Cas J déja calculé, récupération du doublon %i"%__i)
238         #
239         if __alreadyCalculated:
240             logging.debug("FDA   Calcul Jacobienne (par récupération du doublon %i)"%__i)
241             _Jacobienne = self.__listJPCR[__i]
242         else:
243             logging.debug("FDA   Calcul Jacobienne (explicite)")
244             if self.__centeredDF:
245                 #
246                 if self.__mpEnabled and not self.__mfEnabled:
247                     funcrepr = {
248                         "__userFunction__path" : self.__userFunction__path,
249                         "__userFunction__modl" : self.__userFunction__modl,
250                         "__userFunction__name" : self.__userFunction__name,
251                     }
252                     _jobs = []
253                     for i in range( len(_dX) ):
254                         _dXi            = _dX[i]
255                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
256                         _X_plus_dXi[i]  = _X[i] + _dXi
257                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
258                         _X_moins_dXi[i] = _X[i] - _dXi
259                         #
260                         _jobs.append( (_X_plus_dXi,  self.__extraArgs, funcrepr) )
261                         _jobs.append( (_X_moins_dXi, self.__extraArgs, funcrepr) )
262                     #
263                     import multiprocessing
264                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
265                     _HX_plusmoins_dX = self.__pool.map( ExecuteFunction, _jobs )
266                     self.__pool.close()
267                     self.__pool.join()
268                     #
269                     _Jacobienne  = []
270                     for i in range( len(_dX) ):
271                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
272                     #
273                 elif self.__mfEnabled:
274                     _xserie = []
275                     for i in range( len(_dX) ):
276                         _dXi            = _dX[i]
277                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
278                         _X_plus_dXi[i]  = _X[i] + _dXi
279                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
280                         _X_moins_dXi[i] = _X[i] - _dXi
281                         #
282                         _xserie.append( _X_plus_dXi )
283                         _xserie.append( _X_moins_dXi )
284                     #
285                     _HX_plusmoins_dX = self.DirectOperator( _xserie )
286                     #
287                     _Jacobienne  = []
288                     for i in range( len(_dX) ):
289                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
290                     #
291                 else:
292                     _Jacobienne  = []
293                     for i in range( _dX.size ):
294                         _dXi            = _dX[i]
295                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
296                         _X_plus_dXi[i]  = _X[i] + _dXi
297                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
298                         _X_moins_dXi[i] = _X[i] - _dXi
299                         #
300                         _HX_plus_dXi    = self.DirectOperator( _X_plus_dXi )
301                         _HX_moins_dXi   = self.DirectOperator( _X_moins_dXi )
302                         #
303                         _Jacobienne.append( numpy.ravel( _HX_plus_dXi - _HX_moins_dXi ) / (2.*_dXi) )
304                 #
305             else:
306                 #
307                 if self.__mpEnabled and not self.__mfEnabled:
308                     funcrepr = {
309                         "__userFunction__path" : self.__userFunction__path,
310                         "__userFunction__modl" : self.__userFunction__modl,
311                         "__userFunction__name" : self.__userFunction__name,
312                     }
313                     _jobs = []
314                     _jobs.append( (_X.A1, self.__extraArgs, funcrepr) )
315                     for i in range( len(_dX) ):
316                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
317                         _X_plus_dXi[i] = _X[i] + _dX[i]
318                         #
319                         _jobs.append( (_X_plus_dXi, self.__extraArgs, funcrepr) )
320                     #
321                     import multiprocessing
322                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
323                     _HX_plus_dX = self.__pool.map( ExecuteFunction, _jobs )
324                     self.__pool.close()
325                     self.__pool.join()
326                     #
327                     _HX = _HX_plus_dX.pop(0)
328                     #
329                     _Jacobienne = []
330                     for i in range( len(_dX) ):
331                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
332                     #
333                 elif self.__mfEnabled:
334                     _xserie = []
335                     _xserie.append( _X.A1 )
336                     for i in range( len(_dX) ):
337                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
338                         _X_plus_dXi[i] = _X[i] + _dX[i]
339                         #
340                         _xserie.append( _X_plus_dXi )
341                     #
342                     _HX_plus_dX = self.DirectOperator( _xserie )
343                     #
344                     _HX = _HX_plus_dX.pop(0)
345                     #
346                     _Jacobienne = []
347                     for i in range( len(_dX) ):
348                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
349                     #
350                 else:
351                     _Jacobienne  = []
352                     _HX = self.DirectOperator( _X )
353                     for i in range( _dX.size ):
354                         _dXi            = _dX[i]
355                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
356                         _X_plus_dXi[i]  = _X[i] + _dXi
357                         #
358                         _HX_plus_dXi = self.DirectOperator( _X_plus_dXi )
359                         #
360                         _Jacobienne.append( numpy.ravel(( _HX_plus_dXi - _HX ) / _dXi) )
361                 #
362             #
363             _Jacobienne = numpy.asmatrix( numpy.vstack( _Jacobienne ) ).T
364             if self.__avoidRC:
365                 if self.__lenghtRJ < 0: self.__lenghtRJ = 2 * _X.size
366                 while len(self.__listJPCP) > self.__lenghtRJ:
367                     self.__listJPCP.pop(0)
368                     self.__listJPCI.pop(0)
369                     self.__listJPCR.pop(0)
370                     self.__listJPPN.pop(0)
371                     self.__listJPIN.pop(0)
372                 self.__listJPCP.append( copy.copy(_X) )
373                 self.__listJPCI.append( copy.copy(_dX) )
374                 self.__listJPCR.append( copy.copy(_Jacobienne) )
375                 self.__listJPPN.append( numpy.linalg.norm(_X) )
376                 self.__listJPIN.append( numpy.linalg.norm(_Jacobienne) )
377         #
378         logging.debug("FDA Fin du calcul de la Jacobienne")
379         #
380         return _Jacobienne
381
382     # ---------------------------------------------------------
383     def TangentOperator(self, paire, **extraArgs ):
384         """
385         Tangent computation using the Jacobian.
386
387         NB: the extraArgs are there to ensure call compatibility, but they must
388         not be passed here to the user function.
389         """
390         if self.__mfEnabled:
391             assert len(paire) == 1, "Incorrect length of arguments"
392             _paire = paire[0]
393             assert len(_paire) == 2, "Incorrect number of arguments"
394         else:
395             assert len(paire) == 2, "Incorrect number of arguments"
396             _paire = paire
397         X, dX = _paire
398         _Jacobienne = self.TangentMatrix( X )
399         if dX is None or len(dX) == 0:
400             #
401             # Computation of the matrix form if the second argument is None
402             # -------------------------------------------------------------
403             if self.__mfEnabled: return [_Jacobienne,]
404             else:                return _Jacobienne
405         else:
406             #
407             # Computation of the linearized value of H at X applied to dX
408             # ------------------------------------------------------
409             _dX = numpy.asmatrix(numpy.ravel( dX )).T
410             _HtX = numpy.dot(_Jacobienne, _dX)
411             if self.__mfEnabled: return [_HtX.A1,]
412             else:                return _HtX.A1
413
414     # ---------------------------------------------------------
415     def AdjointOperator(self, paire, **extraArgs ):
416         """
417         Adjoint computation using the Jacobian.
418
419         NB: the extraArgs are there to ensure call compatibility, but they must
420         not be passed here to the user function.
421         """
422         if self.__mfEnabled:
423             assert len(paire) == 1, "Incorrect length of arguments"
424             _paire = paire[0]
425             assert len(_paire) == 2, "Incorrect number of arguments"
426         else:
427             assert len(paire) == 2, "Incorrect number of arguments"
428             _paire = paire
429         X, Y = _paire
430         _JacobienneT = self.TangentMatrix( X ).T
431         if Y is None or len(Y) == 0:
432             #
433             # Computation of the matrix form if the second argument is None
434             # -------------------------------------------------------------
435             if self.__mfEnabled: return [_JacobienneT,]
436             else:                return _JacobienneT
437         else:
438             #
439             # Computation of the value of the adjoint at X applied to Y
440             # --------------------------------------------------
441             _Y = numpy.asmatrix(numpy.ravel( Y )).T
442             _HaY = numpy.dot(_JacobienneT, _Y)
443             if self.__mfEnabled: return [_HaY.A1,]
444             else:                return _HaY.A1
445
446 # ==============================================================================
447 def EnsembleOfCenteredPerturbations( _bgcenter, _bgcovariance, _nbmembers ):
448     "Generation of an ensemble of _nbmembers centered random states"
449     #
450     _bgcenter = numpy.ravel(_bgcenter)[:,None]
451     if _nbmembers < 1:
452         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
453     #
454     if _bgcovariance is None:
455         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
456     else:
457         _Z = numpy.random.multivariate_normal(numpy.zeros(_bgcenter.size), _bgcovariance, size=_nbmembers).T
458         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers) + _Z
459     #
460     return BackgroundEnsemble
461
462 # ==============================================================================
463 def EnsembleOfBackgroundPerturbations( _bgcenter, _bgcovariance, _nbmembers, _withSVD = True):
464     "Generation of an ensemble of _nbmembers centered random states"
465     def __CenteredRandomAnomalies(Zr, N):
466         """
467         Generates a matrix of N random anomalies centered on Zr according to
468         the handwritten notes of MB and consistent with the code of PS with eps = -1
469         """
470         eps = -1
471         Q = numpy.identity(N-1)-numpy.ones((N-1,N-1))/numpy.sqrt(N)/(numpy.sqrt(N)-eps)
472         Q = numpy.concatenate((Q, [eps*numpy.ones(N-1)/numpy.sqrt(N)]), axis=0)
473         R, _ = numpy.linalg.qr(numpy.random.normal(size = (N-1,N-1)))
474         Q = numpy.dot(Q,R)
475         Zr = numpy.dot(Q,Zr)
476         return Zr.T
477     #
478     _bgcenter = numpy.ravel(_bgcenter).reshape((-1,1))
479     if _nbmembers < 1:
480         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
481     if _bgcovariance is None:
482         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
483     else:
484         if _withSVD:
485             U, s, V = numpy.linalg.svd(_bgcovariance, full_matrices=False)
486             _nbctl = _bgcenter.size
487             if _nbmembers > _nbctl:
488                 _Z = numpy.concatenate((numpy.dot(
489                     numpy.diag(numpy.sqrt(s[:_nbctl])), V[:_nbctl]),
490                     numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1-_nbctl)), axis = 0)
491             else:
492                 _Z = numpy.dot(numpy.diag(numpy.sqrt(s[:_nbmembers-1])), V[:_nbmembers-1])
493             _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
494             BackgroundEnsemble = _bgcenter + _Zca
495         else:
496             if max(abs(_bgcovariance.flatten())) > 0:
497                 _nbctl = _bgcenter.size
498                 _Z = numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1)
499                 _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
500                 BackgroundEnsemble = _bgcenter + _Zca
501             else:
502                 BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
503     #
504     return BackgroundEnsemble
505
506 # ==============================================================================
507 def EnsembleMean( __Ensemble ):
508     "Returns the empirical mean of an ensemble"
509     return numpy.asarray(__Ensemble).mean(axis=1, dtype=mfp).astype('float').reshape((-1,1))
510
511 # ==============================================================================
512 def EnsembleOfAnomalies( __Ensemble, __OptMean = None, __Normalisation = 1.):
513     "Returns the centered anomalies from an ensemble"
514     if __OptMean is None:
515         __Em = EnsembleMean( __Ensemble )
516     else:
517         __Em = numpy.ravel( __OptMean ).reshape((-1,1))
518     #
519     return __Normalisation * (numpy.asarray( __Ensemble ) - __Em)
520
521 # ==============================================================================
522 def EnsembleErrorCovariance( __Ensemble, __quick = False ):
523     "Returns the empirical estimate of the ensemble covariance"
524     if __quick:
525         # Fast covariance, but rarely positive definite
526         __Covariance = numpy.cov( __Ensemble )
527     else:
528         # Result often identical to numpy.cov, but more robust
529         __n, __m = numpy.asarray( __Ensemble ).shape
530         __Anomalies = EnsembleOfAnomalies( __Ensemble )
531         # Empirical estimate
532         __Covariance = ( __Anomalies @ __Anomalies.T ) / (__m-1)
533         # Ensures symmetry
534         __Covariance = ( __Covariance + __Covariance.T ) * 0.5
535         # Ensures positivity
536         __epsilon    = mpr*numpy.trace( __Covariance )
537         __Covariance = __Covariance + __epsilon * numpy.identity(__n)
538     #
539     return __Covariance
540
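# Editor's illustrative sketch (not part of the original code) of the ensemble
# helpers above, assuming a small synthetic ensemble:
#     E = EnsembleOfBackgroundPerturbations(numpy.zeros(3), numpy.eye(3), 200)
#     EnsembleMean(E)             # (3,1) vector, close to zero
#     EnsembleErrorCovariance(E)  # (3,3) symmetric matrix, approximately numpy.eye(3)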
541 # ==============================================================================
542 def EnsemblePerturbationWithGivenCovariance( __Ensemble, __Covariance, __Seed=None ):
543     "Adds a perturbation to each member of an ensemble according to a prescribed covariance"
544     if hasattr(__Covariance,"assparsematrix"):
545         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance.assparsematrix())/abs(__Ensemble).mean() < mpr).all():
546             # Traitement d'une covariance nulle ou presque
547             return __Ensemble
548         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance.assparsematrix()) < mpr).all():
549             # Traitement d'une covariance nulle ou presque
550             return __Ensemble
551     else:
552         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance)/abs(__Ensemble).mean() < mpr).all():
553             # Traitement d'une covariance nulle ou presque
554             return __Ensemble
555         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance) < mpr).all():
556             # Traitement d'une covariance nulle ou presque
557             return __Ensemble
558     #
559     __n, __m = __Ensemble.shape
560     if __Seed is not None: numpy.random.seed(__Seed)
561     #
562     if hasattr(__Covariance,"isscalar") and __Covariance.isscalar():
563         # Traitement d'une covariance multiple de l'identité
564         __zero = 0.
565         __std  = numpy.sqrt(__Covariance.assparsematrix())
566         __Ensemble += numpy.random.normal(__zero, __std, size=(__m,__n)).T
567     #
568     elif hasattr(__Covariance,"isvector") and __Covariance.isvector():
569         # Traitement d'une covariance diagonale avec variances non identiques
570         __zero = numpy.zeros(__n)
571         __std  = numpy.sqrt(__Covariance.assparsematrix())
572         __Ensemble += numpy.asarray([numpy.random.normal(__zero, __std) for i in range(__m)]).T
573     #
574     elif hasattr(__Covariance,"ismatrix") and __Covariance.ismatrix():
575         # Traitement d'une covariance pleine
576         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance.asfullmatrix(__n), size=__m).T
577     #
578     elif isinstance(__Covariance, numpy.ndarray):
579         # Traitement d'une covariance numpy pleine, sachant qu'on arrive ici en dernier
580         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance, size=__m).T
581     #
582     else:
583         raise ValueError("Error in ensemble perturbation with inadequate covariance specification")
584     #
585     return __Ensemble
586
587 # ==============================================================================
588 def CovarianceInflation(
589         InputCovOrEns,
590         InflationType   = None,
591         InflationFactor = None,
592         BackgroundCov   = None,
593         ):
594     """
595     Inflation applicable either to Pb or Pa, or to the ensembles EXb or EXa
596
597     Synthesis: Hunt 2007, section 2.3.5
598     """
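    # Editor's summary of the transformations implemented below, with
    # r = InflationFactor:
    #   multiplicative on covariances : P  <- r**2 * P
    #   multiplicative on anomalies   : Xi <- mean(X) + r * (Xi - mean(X))
    #   additive on covariances       : P  <- (1-r) * P + r * I
    #   hybrid on background cov.     : P  <- (1-r) * P + r * B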
599     if InflationFactor is None:
600         return InputCovOrEns
601     else:
602         InflationFactor = float(InflationFactor)
603     #
604     if InflationType in ["MultiplicativeOnAnalysisCovariance", "MultiplicativeOnBackgroundCovariance"]:
605         if InflationFactor < 1.:
606             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
607         if InflationFactor < 1.+mpr:
608             return InputCovOrEns
609         OutputCovOrEns = InflationFactor**2 * InputCovOrEns
610     #
611     elif InflationType in ["MultiplicativeOnAnalysisAnomalies", "MultiplicativeOnBackgroundAnomalies"]:
612         if InflationFactor < 1.:
613             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
614         if InflationFactor < 1.+mpr:
615             return InputCovOrEns
616         InputCovOrEnsMean = InputCovOrEns.mean(axis=1, dtype=mfp).astype('float')
617         OutputCovOrEns = InputCovOrEnsMean[:,numpy.newaxis] \
618             + InflationFactor * (InputCovOrEns - InputCovOrEnsMean[:,numpy.newaxis])
619     #
620     elif InflationType in ["AdditiveOnAnalysisCovariance", "AdditiveOnBackgroundCovariance"]:
621         if InflationFactor < 0.:
622             raise ValueError("Inflation factor for additive inflation has to be greater than or equal to 0.")
623         if InflationFactor < mpr:
624             return InputCovOrEns
625         __n, __m = numpy.asarray(InputCovOrEns).shape
626         if __n != __m:
627             raise ValueError("Additive inflation can only be applied to a square (covariance) matrix.")
628         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * numpy.identity(__n)
629     #
630     elif InflationType == "HybridOnBackgroundCovariance":
631         if InflationFactor < 0.:
632             raise ValueError("Inflation factor for hybrid inflation has to be greater than or equal to 0.")
633         if InflationFactor < mpr:
634             return InputCovOrEns
635         __n, __m = numpy.asarray(InputCovOrEns).shape
636         if __n != __m:
637             raise ValueError("Hybrid inflation can only be applied to a square (covariance) matrix.")
638         if BackgroundCov is None:
639             raise ValueError("Background covariance matrix B has to be given for hybrid inflation.")
640         if InputCovOrEns.shape != BackgroundCov.shape:
641             raise ValueError("Ensemble covariance matrix has to be of the same size as the background covariance matrix B.")
642         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * BackgroundCov
643     #
644     elif InflationType == "Relaxation":
645         raise NotImplementedError("InflationType Relaxation")
646     #
647     else:
648         raise ValueError("Error in inflation type, '%s' is not a valid keyword."%InflationType)
649     #
650     return OutputCovOrEns
651
652 # ==============================================================================
653 def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
654     "A posteriori quantile estimation (selfA is modified)"
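    # Editor's note on the sampling principle: each sample Xr is drawn from
    # N(Xa, A); in "Linear" mode the corresponding observation is approximated by
    # HXa + HtM @ (Xr - Xa), in "NonLinear" mode it is computed as Hm(Xr); the
    # quantiles are then extracted empirically from the sorted simulated
    # observations.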
655     nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"]
656     #
657     # Traitement des bornes
658     if "StateBoundsForQuantiles" in selfA._parameters:
659         LBounds = selfA._parameters["StateBoundsForQuantiles"] # Prioritaire
660     elif "Bounds" in selfA._parameters:
661         LBounds = selfA._parameters["Bounds"]  # Défaut raisonnable
662     else:
663         LBounds = None
664     if LBounds is not None:
665         LBounds = ForceNumericBounds( LBounds )
666     _Xa = numpy.ravel(Xa)
667     #
668     # Échantillonnage des états
669     YfQ  = None
670     EXr  = None
671     for i in range(nbsamples):
672         if selfA._parameters["SimulationForQuantiles"] == "Linear" and HtM is not None and HXa is not None:
673             dXr = (numpy.random.multivariate_normal(_Xa,A) - _Xa).reshape((-1,1))
674             if LBounds is not None: # "EstimateProjection" par défaut
675                 dXr = numpy.max(numpy.hstack((dXr,LBounds[:,0].reshape((-1,1)) - Xa)),axis=1)
676                 dXr = numpy.min(numpy.hstack((dXr,LBounds[:,1].reshape((-1,1)) - Xa)),axis=1)
677             dYr = HtM @ dXr
678             Yr = HXa.reshape((-1,1)) + dYr
679             if selfA._toStore("SampledStateForQuantiles"): Xr = _Xa + numpy.ravel(dXr)
680         elif selfA._parameters["SimulationForQuantiles"] == "NonLinear" and Hm is not None:
681             Xr = numpy.random.multivariate_normal(_Xa,A)
682             if LBounds is not None: # "EstimateProjection" par défaut
683                 Xr = numpy.max(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,0].reshape((-1,1)))),axis=1)
684                 Xr = numpy.min(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,1].reshape((-1,1)))),axis=1)
685             Yr = Hm( Xr )
686         else:
687             raise ValueError("Quantile simulation has to be either Linear or NonLinear.")
688         #
689         if YfQ is None:
690             YfQ = Yr.reshape((-1,1))
691             if selfA._toStore("SampledStateForQuantiles"): EXr = Xr.reshape((-1,1))
692         else:
693             YfQ = numpy.hstack((YfQ,Yr.reshape((-1,1))))
694             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.hstack((EXr,Xr.reshape((-1,1))))
695     #
696     # Extraction des quantiles
697     YfQ.sort(axis=-1)
698     YQ = None
699     for quantile in selfA._parameters["Quantiles"]:
700         if not (0. <= float(quantile) <= 1.): continue
701         indice = int(nbsamples * float(quantile) - 1./nbsamples)
702         if YQ is None: YQ = YfQ[:,indice].reshape((-1,1))
703         else:          YQ = numpy.hstack((YQ,YfQ[:,indice].reshape((-1,1))))
704     if YQ is not None: # Liste non vide de quantiles
705         selfA.StoredVariables["SimulationQuantiles"].store( YQ )
706     if selfA._toStore("SampledStateForQuantiles"):
707         selfA.StoredVariables["SampledStateForQuantiles"].store( EXr )
708     #
709     return 0
710
711 # ==============================================================================
712 def ForceNumericBounds( __Bounds ):
713     "Forces the bounds to be numerical values, unless globally None"
714     # Keeps a default value of None if there are no bounds
715     if __Bounds is None: return None
716     # Converts all individual None bounds to +/- infinity
717     __Bounds = numpy.asarray( __Bounds, dtype=float )
718     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
719         raise ValueError("Incorrectly defined bounds data")
720     __Bounds[numpy.isnan(__Bounds)[:,0],0] = -sys.float_info.max
721     __Bounds[numpy.isnan(__Bounds)[:,1],1] =  sys.float_info.max
722     return __Bounds
723
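# Editor's illustrative usage sketch (not part of the original code):
#     ForceNumericBounds([[0., None], [None, 5.]])
#     # -> [[0., +MAX], [-MAX, 5.]] with MAX = sys.float_info.max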
724 # ==============================================================================
725 def RecentredBounds( __Bounds, __Center):
726     # Keeps a default value of None if there are no bounds
727     if __Bounds is None: return None
728     # Recenters the numerical values of the bounds around the given center
729     return ForceNumericBounds( __Bounds ) - numpy.ravel( __Center ).reshape((-1,1))
730
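# Editor's illustrative usage sketch (not part of the original code): the bounds
# are re-expressed as increments around the given center,
#     RecentredBounds([[0., 10.], [0., 10.]], [2., 3.])  # -> [[-2., 8.], [-3., 7.]]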
731 # ==============================================================================
732 def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
733     """
734     Constrained Extended Kalman Filter
735     """
736     if selfA._parameters["EstimationOf"] == "Parameters":
737         selfA._parameters["StoreInternalVariables"] = True
738     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
739     #
740     # Opérateurs
741     H = HO["Direct"].appliedControledFormTo
742     #
743     if selfA._parameters["EstimationOf"] == "State":
744         M = EM["Direct"].appliedControledFormTo
745     #
746     if CM is not None and "Tangent" in CM and U is not None:
747         Cm = CM["Tangent"].asMatrix(Xb)
748     else:
749         Cm = None
750     #
751     # Durée d'observation et tailles
752     if hasattr(Y,"stepnumber"):
753         duration = Y.stepnumber()
754         __p = numpy.cumprod(Y.shape())[-1]
755     else:
756         duration = 2
757         __p = numpy.array(Y).size
758     #
759     # Précalcul des inversions de B et R
760     if selfA._parameters["StoreInternalVariables"] \
761         or selfA._toStore("CostFunctionJ") \
762         or selfA._toStore("CostFunctionJb") \
763         or selfA._toStore("CostFunctionJo") \
764         or selfA._toStore("CurrentOptimum") \
765         or selfA._toStore("APosterioriCovariance"):
766         BI = B.getI()
767         RI = R.getI()
768     #
769     __n = Xb.size
770     #
771     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
772         Xn = Xb
773         Pn = B
774         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
775         selfA.StoredVariables["Analysis"].store( Xb )
776         if selfA._toStore("APosterioriCovariance"):
777             if hasattr(B,"asfullmatrix"):
778                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
779             else:
780                 selfA.StoredVariables["APosterioriCovariance"].store( B )
781         selfA._setInternalState("seed", numpy.random.get_state())
782     elif selfA._parameters["nextStep"]:
783         Xn = selfA._getInternalState("Xn")
784         Pn = selfA._getInternalState("Pn")
785     #
786     if selfA._parameters["EstimationOf"] == "Parameters":
787         XaMin            = Xn
788         previousJMinimum = numpy.finfo(float).max
789     #
790     for step in range(duration-1):
791         if hasattr(Y,"store"):
792             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
793         else:
794             Ynpu = numpy.ravel( Y ).reshape((__p,1))
795         #
796         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
797         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
798         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
799         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
800         #
801         if selfA._parameters["EstimationOf"] == "State":
802             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
803             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
804             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
805             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
806         #
807         if U is not None:
808             if hasattr(U,"store") and len(U)>1:
809                 Un = numpy.ravel( U[step] ).reshape((-1,1))
810             elif hasattr(U,"store") and len(U)==1:
811                 Un = numpy.ravel( U[0] ).reshape((-1,1))
812             else:
813                 Un = numpy.ravel( U ).reshape((-1,1))
814         else:
815             Un = None
816         #
817         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
818             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
819             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
820         #
821         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
822             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
823             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
824                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
825                 Xn_predicted = Xn_predicted + Cm * Un
826             Pn_predicted = Q + Mt * Pn * Ma
827         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
828             # --- > Par principe, M = Id, Q = 0
829             Xn_predicted = Xn
830             Pn_predicted = Pn
831         #
832         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
833             Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
834             Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
835         #
836         if selfA._parameters["EstimationOf"] == "State":
837             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
838             _Innovation  = Ynpu - HX_predicted
839         elif selfA._parameters["EstimationOf"] == "Parameters":
840             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
841             _Innovation  = Ynpu - HX_predicted
842             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
843                 _Innovation = _Innovation - Cm * Un
844         #
845         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
846         Xn = Xn_predicted + Kn * _Innovation
847         Pn = Pn_predicted - Kn * Ht * Pn_predicted
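        # Editor's note: the three lines above are the standard extended Kalman
        # update,
        #   K   = P_f Ha (R + Ht P_f Ha)^-1
        #   x_a = x_f + K * innovation
        #   P_a = P_f - K Ht P_f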
848         #
849         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
850             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
851             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
852         #
853         Xa = Xn # Pointeurs
854         #--------------------------
855         selfA._setInternalState("Xn", Xn)
856         selfA._setInternalState("Pn", Pn)
857         #--------------------------
858         #
859         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
860         # ---> avec analysis
861         selfA.StoredVariables["Analysis"].store( Xa )
862         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
863             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
864         if selfA._toStore("InnovationAtCurrentAnalysis"):
865             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
866         # ---> avec current state
867         if selfA._parameters["StoreInternalVariables"] \
868             or selfA._toStore("CurrentState"):
869             selfA.StoredVariables["CurrentState"].store( Xn )
870         if selfA._toStore("ForecastState"):
871             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
872         if selfA._toStore("ForecastCovariance"):
873             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
874         if selfA._toStore("BMA"):
875             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
876         if selfA._toStore("InnovationAtCurrentState"):
877             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
878         if selfA._toStore("SimulatedObservationAtCurrentState") \
879             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
880             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
881         # ---> autres
882         if selfA._parameters["StoreInternalVariables"] \
883             or selfA._toStore("CostFunctionJ") \
884             or selfA._toStore("CostFunctionJb") \
885             or selfA._toStore("CostFunctionJo") \
886             or selfA._toStore("CurrentOptimum") \
887             or selfA._toStore("APosterioriCovariance"):
888             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
889             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
890             J   = Jb + Jo
891             selfA.StoredVariables["CostFunctionJb"].store( Jb )
892             selfA.StoredVariables["CostFunctionJo"].store( Jo )
893             selfA.StoredVariables["CostFunctionJ" ].store( J )
894             #
895             if selfA._toStore("IndexOfOptimum") \
896                 or selfA._toStore("CurrentOptimum") \
897                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
898                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
899                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
900                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
901                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
902             if selfA._toStore("IndexOfOptimum"):
903                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
904             if selfA._toStore("CurrentOptimum"):
905                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
906             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
907                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
908             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
909                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
910             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
911                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
912             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
913                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
914         if selfA._toStore("APosterioriCovariance"):
915             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
916         if selfA._parameters["EstimationOf"] == "Parameters" \
917             and J < previousJMinimum:
918             previousJMinimum    = J
919             XaMin               = Xa
920             if selfA._toStore("APosterioriCovariance"):
921                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
922     #
923     # Stockage final supplémentaire de l'optimum en estimation de paramètres
924     # ----------------------------------------------------------------------
925     if selfA._parameters["EstimationOf"] == "Parameters":
926         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
927         selfA.StoredVariables["Analysis"].store( XaMin )
928         if selfA._toStore("APosterioriCovariance"):
929             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
930         if selfA._toStore("BMA"):
931             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
932     #
933     return 0
934
935 # ==============================================================================
936 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
937     """
938     EnKS
939     """
940     #
941     # Opérateurs
942     H = HO["Direct"].appliedControledFormTo
943     #
944     if selfA._parameters["EstimationOf"] == "State":
945         M = EM["Direct"].appliedControledFormTo
946     #
947     if CM is not None and "Tangent" in CM and U is not None:
948         Cm = CM["Tangent"].asMatrix(Xb)
949     else:
950         Cm = None
951     #
952     # Précalcul des inversions de B et R
953     RIdemi = R.sqrtmI()
954     #
955     # Durée d'observation et tailles
956     LagL = selfA._parameters["SmootherLagL"]
957     if (not hasattr(Y,"store")) or (not hasattr(Y,"stepnumber")):
958         raise ValueError("Fixed-lag smoother requires a series of observations")
959     if Y.stepnumber() < LagL:
960         raise ValueError("Fixed-lag smoother requires a series of observations longer than the lag L")
961     duration = Y.stepnumber()
962     __p = numpy.cumprod(Y.shape())[-1]
963     __n = Xb.size
964     __m = selfA._parameters["NumberOfMembers"]
965     #
966     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
967         selfA.StoredVariables["Analysis"].store( Xb )
968         if selfA._toStore("APosterioriCovariance"):
969             if hasattr(B,"asfullmatrix"):
970                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
971             else:
972                 selfA.StoredVariables["APosterioriCovariance"].store( B )
973     #
974     # Calcul direct initial (on privilégie la mémorisation au recalcul)
975     __seed = numpy.random.get_state()
976     selfB = copy.deepcopy(selfA)
977     selfB._parameters["StoreSupplementaryCalculations"] = ["CurrentEnsembleState"]
978     if VariantM == "EnKS16-KalmanFilterFormula":
979         etkf(selfB, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM = "KalmanFilterFormula")
980     else:
981         raise ValueError("VariantM has to be chosen in the authorized methods list.")
982     if LagL > 0:
983         EL  = selfB.StoredVariables["CurrentEnsembleState"][LagL-1]
984     else:
985         EL = EnsembleOfBackgroundPerturbations( Xb, None, __m ) # Cf. etkf
986     selfA._parameters["SetSeed"] = numpy.random.set_state(__seed)
987     #
988     for step in range(LagL,duration-1):
989         #
990         sEL = selfB.StoredVariables["CurrentEnsembleState"][step+1-LagL:step+1]
991         sEL.append(None)
992         #
993         if hasattr(Y,"store"):
994             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
995         else:
996             Ynpu = numpy.ravel( Y ).reshape((__p,1))
997         #
998         if U is not None:
999             if hasattr(U,"store") and len(U)>1:
1000                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1001             elif hasattr(U,"store") and len(U)==1:
1002                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1003             else:
1004                 Un = numpy.asmatrix(numpy.ravel( U )).T
1005         else:
1006             Un = None
1007         #
1008         #--------------------------
1009         if VariantM == "EnKS16-KalmanFilterFormula":
1010             if selfA._parameters["EstimationOf"] == "State": # Forecast
1011                 EL = M( [(EL[:,i], Un) for i in range(__m)],
1012                     argsAsSerie = True,
1013                     returnSerieAsArrayMatrix = True )
1014                 EL = EnsemblePerturbationWithGivenCovariance( EL, Q )
1015                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1016                     argsAsSerie = True,
1017                     returnSerieAsArrayMatrix = True )
1018                 if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
1019                     Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1020                     EZ = EZ + Cm * Un
1021             elif selfA._parameters["EstimationOf"] == "Parameters":
1022                 # --- > Par principe, M = Id, Q = 0
1023                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1024                     argsAsSerie = True,
1025                     returnSerieAsArrayMatrix = True )
1026             #
1027             vEm   = EL.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1028             vZm   = EZ.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1029             #
1030             mS    = RIdemi @ EnsembleOfAnomalies( EZ, vZm, 1./math.sqrt(__m-1) )
1031             mS    = mS.reshape((-1,__m)) # Pour dimension 1
1032             delta = RIdemi @ ( Ynpu - vZm )
1033             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1034             vw    = mT @ mS.T @ delta
1035             #
1036             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1037             mU    = numpy.identity(__m)
1038             wTU   = (vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU)
1039             #
1040             EX    = EnsembleOfAnomalies( EL, vEm, 1./math.sqrt(__m-1) )
1041             EL    = vEm + EX @ wTU
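            # Editor's note: this is the ETKF analysis in ensemble space, with
            #   S = R^(-1/2) (HE - mean(HE)) / sqrt(m-1),  d = R^(-1/2) (y - mean(HE)),
            # the code above builds T = (I + S.T S)^(-1), w = T S.T d, and updates
            # the ensemble as  Ea = mean(E) + A (w + sqrt(m-1) T^(1/2)),
            # where A holds the forecast anomalies scaled by 1/sqrt(m-1).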
1042             #
1043             sEL[LagL] = EL
1044             for irl in range(LagL): # Lissage des L précédentes analysis
1045                 vEm = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1046                 EX = EnsembleOfAnomalies( sEL[irl], vEm, 1./math.sqrt(__m-1) )
1047                 sEL[irl] = vEm + EX @ wTU
1048             #
1049             # Conservation de l'analyse retrospective d'ordre 0 avant rotation
1050             Xa = sEL[0].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1051             if selfA._toStore("APosterioriCovariance"):
1052                 EXn = sEL[0]
1053             #
1054             for irl in range(LagL):
1055                 sEL[irl] = sEL[irl+1]
1056             sEL[LagL] = None
1057         #--------------------------
1058         else:
1059             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1060         #
1061         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1062         # ---> avec analysis
1063         selfA.StoredVariables["Analysis"].store( Xa )
1064         if selfA._toStore("APosterioriCovariance"):
1065             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(EXn) )
1066     #
1067     # Stockage des dernières analyses incomplètement remises à jour
1068     for irl in range(LagL):
1069         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1070         Xa = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1071         selfA.StoredVariables["Analysis"].store( Xa )
1072     #
1073     return 0
1074
1075 # ==============================================================================
1076 def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
1077     """
1078     Ensemble-Transform EnKF
1079     """
1080     if selfA._parameters["EstimationOf"] == "Parameters":
1081         selfA._parameters["StoreInternalVariables"] = True
1082     #
1083     # Operators
1084     H = HO["Direct"].appliedControledFormTo
1085     #
1086     if selfA._parameters["EstimationOf"] == "State":
1087         M = EM["Direct"].appliedControledFormTo
1088     #
1089     if CM is not None and "Tangent" in CM and U is not None:
1090         Cm = CM["Tangent"].asMatrix(Xb)
1091     else:
1092         Cm = None
1093     #
1094     # Observation duration and sizes
1095     if hasattr(Y,"stepnumber"):
1096         duration = Y.stepnumber()
1097         __p = numpy.cumprod(Y.shape())[-1]
1098     else:
1099         duration = 2
1100         __p = numpy.array(Y).size
1101     #
1102     # Precompute the inverses of B and R
1103     if selfA._parameters["StoreInternalVariables"] \
1104         or selfA._toStore("CostFunctionJ") \
1105         or selfA._toStore("CostFunctionJb") \
1106         or selfA._toStore("CostFunctionJo") \
1107         or selfA._toStore("CurrentOptimum") \
1108         or selfA._toStore("APosterioriCovariance"):
1109         BI = B.getI()
1110         RI = R.getI()
1111     elif VariantM != "KalmanFilterFormula":
1112         RI = R.getI()
1113     if VariantM == "KalmanFilterFormula":
1114         RIdemi = R.sqrtmI()
1115     #
1116     __n = Xb.size
1117     __m = selfA._parameters["NumberOfMembers"]
1118     #
1119     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1120         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
1121         selfA.StoredVariables["Analysis"].store( Xb )
1122         if selfA._toStore("APosterioriCovariance"):
1123             if hasattr(B,"asfullmatrix"):
1124                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1125             else:
1126                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1127         selfA._setInternalState("seed", numpy.random.get_state())
1128     elif selfA._parameters["nextStep"]:
1129         Xn = selfA._getInternalState("Xn")
1130     #
1131     previousJMinimum = numpy.finfo(float).max
1132     #
1133     for step in range(duration-1):
1134         numpy.random.set_state(selfA._getInternalState("seed"))
1135         if hasattr(Y,"store"):
1136             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1137         else:
1138             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1139         #
1140         if U is not None:
1141             if hasattr(U,"store") and len(U)>1:
1142                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1143             elif hasattr(U,"store") and len(U)==1:
1144                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1145             else:
1146                 Un = numpy.asmatrix(numpy.ravel( U )).T
1147         else:
1148             Un = None
1149         #
1150         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1151             Xn = CovarianceInflation( Xn,
1152                 selfA._parameters["InflationType"],
1153                 selfA._parameters["InflationFactor"],
1154                 )
1155         #
1156         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1157             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
1158                 argsAsSerie = True,
1159                 returnSerieAsArrayMatrix = True )
1160             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
1161             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1162                 argsAsSerie = True,
1163                 returnSerieAsArrayMatrix = True )
1164             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
1165                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1166                 Xn_predicted = Xn_predicted + Cm * Un
1167         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1168             # --- > By principle, M = Id, Q = 0
1169             Xn_predicted = EMX = Xn
1170             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1171                 argsAsSerie = True,
1172                 returnSerieAsArrayMatrix = True )
1173         #
1174         # Mean of forecast and observation of forecast
1175         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1176         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1177         #
1178         # Anomalies
1179         EaX   = EnsembleOfAnomalies( Xn_predicted, Xfm )
1180         EaHX  = EnsembleOfAnomalies( HX_predicted, Hfm)
1181         #
1182         #--------------------------
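             # Each variant below computes the analysis weights (and an anomaly transform) in
             # the m-dimensional ensemble space. For "KalmanFilterFormula" this is the
             # deterministic ETKF square-root update:
             #   S = R^(-1/2) Y' / sqrt(m-1),  T = (I + S^T S)^(-1),
             #   w = T S^T R^(-1/2) (y - mean(H(E))),
             #   Xa = mean(E) + X'/sqrt(m-1) (w + sqrt(m-1) T^(1/2)),   X', Y' the anomalies.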
1183         if VariantM == "KalmanFilterFormula":
1184             mS    = RIdemi * EaHX / math.sqrt(__m-1)
1185             mS    = mS.reshape((-1,__m)) # Pour dimension 1
1186             delta = RIdemi * ( Ynpu - Hfm )
1187             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1188             vw    = mT @ mS.T @ delta
1189             #
1190             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1191             mU    = numpy.identity(__m)
1192             #
1193             EaX   = EaX / math.sqrt(__m-1)
1194             Xn    = Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU )
1195         #--------------------------
1196         elif VariantM == "Variational":
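                 # Variational formulation of the same analysis: the weights w minimize, in
                 # ensemble space, J(w) = 0.5 (d - Y'w)^T R^(-1) (d - Y'w) + 0.5 (m-1) w^T w,
                 # solved by conjugate gradient; the posterior spread is then rebuilt from the
                 # inverse Hessian of J.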
1197             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1198             def CostFunction(w):
1199                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1200                 _Jo = 0.5 * _A.T @ (RI * _A)
1201                 _Jb = 0.5 * (__m-1) * w.T @ w
1202                 _J  = _Jo + _Jb
1203                 return float(_J)
1204             def GradientOfCostFunction(w):
1205                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1206                 _GradJo = - EaHX.T @ (RI * _A)
1207                 _GradJb = (__m-1) * w.reshape((__m,1))
1208                 _GradJ  = _GradJo + _GradJb
1209                 return numpy.ravel(_GradJ)
1210             vw = scipy.optimize.fmin_cg(
1211                 f           = CostFunction,
1212                 x0          = numpy.zeros(__m),
1213                 fprime      = GradientOfCostFunction,
1214                 args        = (),
1215                 disp        = False,
1216                 )
1217             #
1218             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1219             Htb = (__m-1) * numpy.identity(__m)
1220             Hta = Hto + Htb
1221             #
1222             Pta = numpy.linalg.inv( Hta )
1223             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1224             #
1225             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1226         #--------------------------
1227         elif VariantM == "FiniteSize11": # Jauge Boc2011
1228             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1229             def CostFunction(w):
1230                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1231                 _Jo = 0.5 * _A.T @ (RI * _A)
1232                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
1233                 _J  = _Jo + _Jb
1234                 return float(_J)
1235             def GradientOfCostFunction(w):
1236                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1237                 _GradJo = - EaHX.T @ (RI * _A)
1238                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1239                 _GradJ  = _GradJo + _GradJb
1240                 return numpy.ravel(_GradJ)
1241             vw = scipy.optimize.fmin_cg(
1242                 f           = CostFunction,
1243                 x0          = numpy.zeros(__m),
1244                 fprime      = GradientOfCostFunction,
1245                 args        = (),
1246                 disp        = False,
1247                 )
1248             #
1249             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1250             Htb = __m * \
1251                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1252                 / (1 + 1/__m + vw.T @ vw)**2
1253             Hta = Hto + Htb
1254             #
1255             Pta = numpy.linalg.inv( Hta )
1256             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1257             #
1258             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1259         #--------------------------
1260         elif VariantM == "FiniteSize15": # Jauge Boc2015
1261             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1262             def CostFunction(w):
1263                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1264                 _Jo = 0.5 * _A.T @ (RI * _A)
1265                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w)
1266                 _J  = _Jo + _Jb
1267                 return float(_J)
1268             def GradientOfCostFunction(w):
1269                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1270                 _GradJo = - EaHX.T @ (RI * _A)
1271                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1272                 _GradJ  = _GradJo + _GradJb
1273                 return numpy.ravel(_GradJ)
1274             vw = scipy.optimize.fmin_cg(
1275                 f           = CostFunction,
1276                 x0          = numpy.zeros(__m),
1277                 fprime      = GradientOfCostFunction,
1278                 args        = (),
1279                 disp        = False,
1280                 )
1281             #
1282             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1283             Htb = (__m+1) * \
1284                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1285                 / (1 + 1/__m + vw.T @ vw)**2
1286             Hta = Hto + Htb
1287             #
1288             Pta = numpy.linalg.inv( Hta )
1289             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1290             #
1291             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1292         #--------------------------
1293         elif VariantM == "FiniteSize16": # Jauge Boc2016
1294             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1295             def CostFunction(w):
1296                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1297                 _Jo = 0.5 * _A.T @ (RI * _A)
1298                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
1299                 _J  = _Jo + _Jb
1300                 return float(_J)
1301             def GradientOfCostFunction(w):
1302                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1303                 _GradJo = - EaHX.T @ (RI * _A)
1304                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
1305                 _GradJ  = _GradJo + _GradJb
1306                 return numpy.ravel(_GradJ)
1307             vw = scipy.optimize.fmin_cg(
1308                 f           = CostFunction,
1309                 x0          = numpy.zeros(__m),
1310                 fprime      = GradientOfCostFunction,
1311                 args        = (),
1312                 disp        = False,
1313                 )
1314             #
1315             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1316             Htb = ((__m+1) / (__m-1)) * \
1317                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.identity(__m) - 2 * vw @ vw.T / (__m-1) ) \
1318                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
1319             Hta = Hto + Htb
1320             #
1321             Pta = numpy.linalg.inv( Hta )
1322             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1323             #
1324             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1325         #--------------------------
1326         else:
1327             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1328         #
1329         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1330             Xn = CovarianceInflation( Xn,
1331                 selfA._parameters["InflationType"],
1332                 selfA._parameters["InflationFactor"],
1333                 )
1334         #
1335         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1336         #--------------------------
1337         selfA._setInternalState("Xn", Xn)
1338         selfA._setInternalState("seed", numpy.random.get_state())
1339         #--------------------------
1340         #
1341         if selfA._parameters["StoreInternalVariables"] \
1342             or selfA._toStore("CostFunctionJ") \
1343             or selfA._toStore("CostFunctionJb") \
1344             or selfA._toStore("CostFunctionJo") \
1345             or selfA._toStore("APosterioriCovariance") \
1346             or selfA._toStore("InnovationAtCurrentAnalysis") \
1347             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1348             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1349             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1350             _Innovation = Ynpu - _HXa
1351         #
1352         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1353         # ---> avec analysis
1354         selfA.StoredVariables["Analysis"].store( Xa )
1355         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1356             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1357         if selfA._toStore("InnovationAtCurrentAnalysis"):
1358             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1359         # ---> avec current state
1360         if selfA._parameters["StoreInternalVariables"] \
1361             or selfA._toStore("CurrentState"):
1362             selfA.StoredVariables["CurrentState"].store( Xn )
1363         if selfA._toStore("ForecastState"):
1364             selfA.StoredVariables["ForecastState"].store( EMX )
1365         if selfA._toStore("ForecastCovariance"):
1366             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
1367         if selfA._toStore("BMA"):
1368             selfA.StoredVariables["BMA"].store( EMX - Xa )
1369         if selfA._toStore("InnovationAtCurrentState"):
1370             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
1371         if selfA._toStore("SimulatedObservationAtCurrentState") \
1372             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1373             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1374         # ---> autres
1375         if selfA._parameters["StoreInternalVariables"] \
1376             or selfA._toStore("CostFunctionJ") \
1377             or selfA._toStore("CostFunctionJb") \
1378             or selfA._toStore("CostFunctionJo") \
1379             or selfA._toStore("CurrentOptimum") \
1380             or selfA._toStore("APosterioriCovariance"):
1381             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1382             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1383             J   = Jb + Jo
1384             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1385             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1386             selfA.StoredVariables["CostFunctionJ" ].store( J )
1387             #
1388             if selfA._toStore("IndexOfOptimum") \
1389                 or selfA._toStore("CurrentOptimum") \
1390                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1391                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1392                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1393                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1394                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1395             if selfA._toStore("IndexOfOptimum"):
1396                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1397             if selfA._toStore("CurrentOptimum"):
1398                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1399             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1400                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1401             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1402                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1403             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1404                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1405             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1406                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1407         if selfA._toStore("APosterioriCovariance"):
1408             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1409         if selfA._parameters["EstimationOf"] == "Parameters" \
1410             and J < previousJMinimum:
1411             previousJMinimum    = J
1412             XaMin               = Xa
1413             if selfA._toStore("APosterioriCovariance"):
1414                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1415         # ---> Pour les smoothers
1416         if selfA._toStore("CurrentEnsembleState"):
1417             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1418     #
1419     # Additional final storage of the optimum in parameter estimation
1420     # ----------------------------------------------------------------
1421     if selfA._parameters["EstimationOf"] == "Parameters":
1422         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1423         selfA.StoredVariables["Analysis"].store( XaMin )
1424         if selfA._toStore("APosterioriCovariance"):
1425             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1426         if selfA._toStore("BMA"):
1427             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1428     #
1429     return 0
1430
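     # ==============================================================================
     # Illustrative sketch only (not called by ADAO): a minimal NumPy-only version of
     # the "KalmanFilterFormula" analysis step of etkf above, on plain arrays. The
     # argument names (Xf, Yobs, HXf, RIdemi) are local assumptions for this example.
     def _etkf_analysis_sketch( Xf, Yobs, HXf, RIdemi ):
         """
         Xf     : forecast ensemble, shape (n, m)
         Yobs   : observation vector, shape (p, 1)
         HXf    : observed forecast ensemble H(Xf), shape (p, m)
         RIdemi : R^(-1/2), shape (p, p)
         Returns the analysis ensemble, shape (n, m).
         """
         __m   = Xf.shape[1]
         Xfm   = Xf.mean(axis=1, keepdims=True)
         Hfm   = HXf.mean(axis=1, keepdims=True)
         EaX   = (Xf - Xfm) / math.sqrt(__m-1)          # scaled state anomalies
         mS    = RIdemi @ (HXf - Hfm) / math.sqrt(__m-1) # scaled observed anomalies
         mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
         vw    = mT @ mS.T @ (RIdemi @ (Yobs - Hfm))     # analysis weights
         Tdemi = numpy.real(scipy.linalg.sqrtm(mT))      # square-root transform
         return Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi )
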
1431 # ==============================================================================
1432 def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1433     """
1434     Extended Kalman Filter
1435     """
1436     if selfA._parameters["EstimationOf"] == "Parameters":
1437         selfA._parameters["StoreInternalVariables"] = True
1438     #
1439     # Operators
1440     H = HO["Direct"].appliedControledFormTo
1441     #
1442     if selfA._parameters["EstimationOf"] == "State":
1443         M = EM["Direct"].appliedControledFormTo
1444     #
1445     if CM is not None and "Tangent" in CM and U is not None:
1446         Cm = CM["Tangent"].asMatrix(Xb)
1447     else:
1448         Cm = None
1449     #
1450     # Observation duration and sizes
1451     if hasattr(Y,"stepnumber"):
1452         duration = Y.stepnumber()
1453         __p = numpy.cumprod(Y.shape())[-1]
1454     else:
1455         duration = 2
1456         __p = numpy.array(Y).size
1457     #
1458     # Precompute the inverses of B and R
1459     if selfA._parameters["StoreInternalVariables"] \
1460         or selfA._toStore("CostFunctionJ") \
1461         or selfA._toStore("CostFunctionJb") \
1462         or selfA._toStore("CostFunctionJo") \
1463         or selfA._toStore("CurrentOptimum") \
1464         or selfA._toStore("APosterioriCovariance"):
1465         BI = B.getI()
1466         RI = R.getI()
1467     #
1468     __n = Xb.size
1469     #
1470     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1471         Xn = Xb
1472         Pn = B
1473         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1474         selfA.StoredVariables["Analysis"].store( Xb )
1475         if selfA._toStore("APosterioriCovariance"):
1476             if hasattr(B,"asfullmatrix"):
1477                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1478             else:
1479                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1480         selfA._setInternalState("seed", numpy.random.get_state())
1481     elif selfA._parameters["nextStep"]:
1482         Xn = selfA._getInternalState("Xn")
1483         Pn = selfA._getInternalState("Pn")
1484     #
1485     if selfA._parameters["EstimationOf"] == "Parameters":
1486         XaMin            = Xn
1487         previousJMinimum = numpy.finfo(float).max
1488     #
1489     for step in range(duration-1):
1490         if hasattr(Y,"store"):
1491             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1492         else:
1493             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1494         #
1495         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1496         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1497         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1498         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1499         #
1500         if selfA._parameters["EstimationOf"] == "State":
1501             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1502             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1503             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1504             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1505         #
1506         if U is not None:
1507             if hasattr(U,"store") and len(U)>1:
1508                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1509             elif hasattr(U,"store") and len(U)==1:
1510                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1511             else:
1512                 Un = numpy.ravel( U ).reshape((-1,1))
1513         else:
1514             Un = None
1515         #
1516         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1517             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1518             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
1519                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1520                 Xn_predicted = Xn_predicted + Cm * Un
1521             Pn_predicted = Q + Mt * Pn * Ma
1522         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1523             # --- > By principle, M = Id, Q = 0
1524             Xn_predicted = Xn
1525             Pn_predicted = Pn
1526         #
1527         if selfA._parameters["EstimationOf"] == "State":
1528             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1529             _Innovation  = Ynpu - HX_predicted
1530         elif selfA._parameters["EstimationOf"] == "Parameters":
1531             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1532             _Innovation  = Ynpu - HX_predicted
1533             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
1534                 _Innovation = _Innovation - Cm * Un
1535         #
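             # Standard (extended) Kalman analysis with the tangent (Ht) and adjoint (Ha)
             # linearizations:
             #   K = Pf Ha (R + Ht Pf Ha)^(-1),  xa = xf + K (y - H(xf)),  Pa = Pf - K Ht Pf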
1536         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1537         Xn = Xn_predicted + Kn * _Innovation
1538         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1539         #
1540         Xa = Xn # Pointers
1541         #--------------------------
1542         selfA._setInternalState("Xn", Xn)
1543         selfA._setInternalState("Pn", Pn)
1544         #--------------------------
1545         #
1546         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1547         # ---> avec analysis
1548         selfA.StoredVariables["Analysis"].store( Xa )
1549         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1550             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1551         if selfA._toStore("InnovationAtCurrentAnalysis"):
1552             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1553         # ---> avec current state
1554         if selfA._parameters["StoreInternalVariables"] \
1555             or selfA._toStore("CurrentState"):
1556             selfA.StoredVariables["CurrentState"].store( Xn )
1557         if selfA._toStore("ForecastState"):
1558             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1559         if selfA._toStore("ForecastCovariance"):
1560             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1561         if selfA._toStore("BMA"):
1562             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1563         if selfA._toStore("InnovationAtCurrentState"):
1564             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1565         if selfA._toStore("SimulatedObservationAtCurrentState") \
1566             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1567             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1568         # ---> autres
1569         if selfA._parameters["StoreInternalVariables"] \
1570             or selfA._toStore("CostFunctionJ") \
1571             or selfA._toStore("CostFunctionJb") \
1572             or selfA._toStore("CostFunctionJo") \
1573             or selfA._toStore("CurrentOptimum") \
1574             or selfA._toStore("APosterioriCovariance"):
1575             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1576             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1577             J   = Jb + Jo
1578             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1579             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1580             selfA.StoredVariables["CostFunctionJ" ].store( J )
1581             #
1582             if selfA._toStore("IndexOfOptimum") \
1583                 or selfA._toStore("CurrentOptimum") \
1584                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1585                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1586                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1587                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1588                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1589             if selfA._toStore("IndexOfOptimum"):
1590                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1591             if selfA._toStore("CurrentOptimum"):
1592                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1593             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1594                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1595             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1596                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1597             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1598                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1599             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1600                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1601         if selfA._toStore("APosterioriCovariance"):
1602             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1603         if selfA._parameters["EstimationOf"] == "Parameters" \
1604             and J < previousJMinimum:
1605             previousJMinimum    = J
1606             XaMin               = Xa
1607             if selfA._toStore("APosterioriCovariance"):
1608                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1609     #
1610     # Additional final storage of the optimum in parameter estimation
1611     # ----------------------------------------------------------------
1612     if selfA._parameters["EstimationOf"] == "Parameters":
1613         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1614         selfA.StoredVariables["Analysis"].store( XaMin )
1615         if selfA._toStore("APosterioriCovariance"):
1616             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1617         if selfA._toStore("BMA"):
1618             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1619     #
1620     return 0
1621
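     # ==============================================================================
     # Illustrative sketch only (not called by ADAO): the analysis update used in exkf
     # above, written for plain NumPy arrays with an already linearized observation
     # operator. The argument names are local assumptions for this example.
     def _kalman_update_sketch( xf, Pf, y, Ht, R ):
         """
         xf : forecast state (n, 1)       Pf : forecast covariance (n, n)
         y  : observation (p, 1)          Ht : linearized observation operator (p, n)
         R  : observation error covariance (p, p)
         Returns the analysis state and covariance (xa, Pa).
         """
         K  = Pf @ Ht.T @ numpy.linalg.inv( R + Ht @ Pf @ Ht.T )  # Kalman gain
         xa = xf + K @ (y - Ht @ xf)                              # analysis state
         Pa = Pf - K @ Ht @ Pf                                    # analysis covariance
         return xa, Pa
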
1622 # ==============================================================================
1623 def ienkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="IEnKF12",
1624     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1625     """
1626     Iterative EnKF
1627     """
1628     if selfA._parameters["EstimationOf"] == "Parameters":
1629         selfA._parameters["StoreInternalVariables"] = True
1630     #
1631     # Operators
1632     H = HO["Direct"].appliedControledFormTo
1633     #
1634     if selfA._parameters["EstimationOf"] == "State":
1635         M = EM["Direct"].appliedControledFormTo
1636     #
1637     if CM is not None and "Tangent" in CM and U is not None:
1638         Cm = CM["Tangent"].asMatrix(Xb)
1639     else:
1640         Cm = None
1641     #
1642     # Observation duration and sizes
1643     if hasattr(Y,"stepnumber"):
1644         duration = Y.stepnumber()
1645         __p = numpy.cumprod(Y.shape())[-1]
1646     else:
1647         duration = 2
1648         __p = numpy.array(Y).size
1649     #
1650     # Precompute the inverses of B and R
1651     if selfA._parameters["StoreInternalVariables"] \
1652         or selfA._toStore("CostFunctionJ") \
1653         or selfA._toStore("CostFunctionJb") \
1654         or selfA._toStore("CostFunctionJo") \
1655         or selfA._toStore("CurrentOptimum") \
1656         or selfA._toStore("APosterioriCovariance"):
1657         BI = B.getI()
1658     RI = R.getI()
1659     #
1660     __n = Xb.size
1661     __m = selfA._parameters["NumberOfMembers"]
1662     #
1663     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1664         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
1665         else:                         Pn = B
1666         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
1667         selfA.StoredVariables["Analysis"].store( Xb )
1668         if selfA._toStore("APosterioriCovariance"):
1669             if hasattr(B,"asfullmatrix"):
1670                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1671             else:
1672                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1673         selfA._setInternalState("seed", numpy.random.get_state())
1674     elif selfA._parameters["nextStep"]:
1675         Xn = selfA._getInternalState("Xn")
1676     #
1677     previousJMinimum = numpy.finfo(float).max
1678     #
1679     for step in range(duration-1):
1680         numpy.random.set_state(selfA._getInternalState("seed"))
1681         if hasattr(Y,"store"):
1682             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1683         else:
1684             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1685         #
1686         if U is not None:
1687             if hasattr(U,"store") and len(U)>1:
1688                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1689             elif hasattr(U,"store") and len(U)==1:
1690                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1691             else:
1692                 Un = numpy.asmatrix(numpy.ravel( U )).T
1693         else:
1694             Un = None
1695         #
1696         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1697             Xn = CovarianceInflation( Xn,
1698                 selfA._parameters["InflationType"],
1699                 selfA._parameters["InflationFactor"],
1700                 )
1701         #
1702         #--------------------------
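             # "IEnKF12": Gauss-Newton iterations on the ensemble weights vw. At each inner
             # iteration the model and the observation operator are re-evaluated on an
             # ensemble rebuilt around the current estimate (finite-difference "bundle" if
             # BnotT, transform with Ta otherwise); the gradient GradJ and the approximate
             # Hessian mH of the ensemble-space cost are formed, and vw is updated by solving
             # mH Deltaw = -GradJ until ||Deltaw|| < _e or _jmax iterations are reached.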
1703         if VariantM == "IEnKF12":
1704             Xfm = numpy.ravel(Xn.mean(axis=1, dtype=mfp).astype('float'))
1705             EaX = EnsembleOfAnomalies( Xn ) / math.sqrt(__m-1)
1706             __j = 0
1707             Deltaw = 1
1708             if not BnotT:
1709                 Ta  = numpy.identity(__m)
1710             vw  = numpy.zeros(__m)
1711             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
1712                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
1713                 #
1714                 if BnotT:
1715                     E1 = vx1 + _epsilon * EaX
1716                 else:
1717                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
1718                 #
1719                 if selfA._parameters["EstimationOf"] == "State": # Forecast + Q
1720                     E2 = M( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
1721                         argsAsSerie = True,
1722                         returnSerieAsArrayMatrix = True )
1723                 elif selfA._parameters["EstimationOf"] == "Parameters":
1724                     # --- > By principle, M = Id
1725                     E2 = Xn
1726                 vx2 = E2.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1727                 vy1 = H((vx2, Un)).reshape((__p,1))
1728                 #
1729                 HE2 = H( [(E2[:,i,numpy.newaxis], Un) for i in range(__m)],
1730                     argsAsSerie = True,
1731                     returnSerieAsArrayMatrix = True )
1732                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1733                 #
1734                 if BnotT:
1735                     EaY = (HE2 - vy2) / _epsilon
1736                 else:
1737                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
1738                 #
1739                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy1 )))
1740                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
1741                 Deltaw = - numpy.linalg.solve(mH,GradJ)
1742                 #
1743                 vw = vw + Deltaw
1744                 #
1745                 if not BnotT:
1746                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1747                 #
1748                 __j = __j + 1
1749             #
1750             A2 = EnsembleOfAnomalies( E2 )
1751             #
1752             if BnotT:
1753                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1754                 A2 = math.sqrt(__m-1) * A2 @ Ta / _epsilon
1755             #
1756             Xn = vx2 + A2
1757         #--------------------------
1758         else:
1759             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1760         #
1761         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1762             Xn = CovarianceInflation( Xn,
1763                 selfA._parameters["InflationType"],
1764                 selfA._parameters["InflationFactor"],
1765                 )
1766         #
1767         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1768         #--------------------------
1769         selfA._setInternalState("Xn", Xn)
1770         selfA._setInternalState("seed", numpy.random.get_state())
1771         #--------------------------
1772         #
1773         if selfA._parameters["StoreInternalVariables"] \
1774             or selfA._toStore("CostFunctionJ") \
1775             or selfA._toStore("CostFunctionJb") \
1776             or selfA._toStore("CostFunctionJo") \
1777             or selfA._toStore("APosterioriCovariance") \
1778             or selfA._toStore("InnovationAtCurrentAnalysis") \
1779             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1780             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1781             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1782             _Innovation = Ynpu - _HXa
1783         #
1784         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1785         # ---> avec analysis
1786         selfA.StoredVariables["Analysis"].store( Xa )
1787         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1788             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1789         if selfA._toStore("InnovationAtCurrentAnalysis"):
1790             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1791         # ---> avec current state
1792         if selfA._parameters["StoreInternalVariables"] \
1793             or selfA._toStore("CurrentState"):
1794             selfA.StoredVariables["CurrentState"].store( Xn )
1795         if selfA._toStore("ForecastState"):
1796             selfA.StoredVariables["ForecastState"].store( E2 )
1797         if selfA._toStore("ForecastCovariance"):
1798             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(E2) )
1799         if selfA._toStore("BMA"):
1800             selfA.StoredVariables["BMA"].store( E2 - Xa )
1801         if selfA._toStore("InnovationAtCurrentState"):
1802             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
1803         if selfA._toStore("SimulatedObservationAtCurrentState") \
1804             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1805             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
1806         # ---> autres
1807         if selfA._parameters["StoreInternalVariables"] \
1808             or selfA._toStore("CostFunctionJ") \
1809             or selfA._toStore("CostFunctionJb") \
1810             or selfA._toStore("CostFunctionJo") \
1811             or selfA._toStore("CurrentOptimum") \
1812             or selfA._toStore("APosterioriCovariance"):
1813             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1814             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1815             J   = Jb + Jo
1816             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1817             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1818             selfA.StoredVariables["CostFunctionJ" ].store( J )
1819             #
1820             if selfA._toStore("IndexOfOptimum") \
1821                 or selfA._toStore("CurrentOptimum") \
1822                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1823                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1824                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1825                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1826                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1827             if selfA._toStore("IndexOfOptimum"):
1828                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1829             if selfA._toStore("CurrentOptimum"):
1830                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1831             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1832                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1833             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1834                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1835             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1836                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1837             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1838                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1839         if selfA._toStore("APosterioriCovariance"):
1840             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1841         if selfA._parameters["EstimationOf"] == "Parameters" \
1842             and J < previousJMinimum:
1843             previousJMinimum    = J
1844             XaMin               = Xa
1845             if selfA._toStore("APosterioriCovariance"):
1846                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1847         # ---> Pour les smoothers
1848         if selfA._toStore("CurrentEnsembleState"):
1849             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1850     #
1851     # Additional final storage of the optimum in parameter estimation
1852     # ----------------------------------------------------------------
1853     if selfA._parameters["EstimationOf"] == "Parameters":
1854         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1855         selfA.StoredVariables["Analysis"].store( XaMin )
1856         if selfA._toStore("APosterioriCovariance"):
1857             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1858         if selfA._toStore("BMA"):
1859             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1860     #
1861     return 0
1862
1863 # ==============================================================================
1864 def incr3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1865     """
1866     Incremental 3DVAR
1867     """
1868     #
1869     # Initialisations
1870     # ---------------
1871     #
1872     # Non-linear operator for the outer loop
1873     Hm = HO["Direct"].appliedTo
1874     #
1875     # Precompute the inverses of B and R
1876     BI = B.getI()
1877     RI = R.getI()
1878     #
1879     # Starting point of the optimization
1880     Xini = selfA._parameters["InitializationPoint"]
1881     #
1882     HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
1883     Innovation = Y - HXb
1884     #
1885     # Outer Loop
1886     # ----------
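         # The outer loop re-linearizes the observation operator around the current estimate
         # Xr, while the inner loop minimizes the quadratic cost in the increment dx,
         #   J(dx) = 0.5 dx^T B^(-1) dx + 0.5 (d - Ht dx)^T R^(-1) (d - Ht dx),  d = Y - H(Xb),
         # and the outer iterations stop when the decrease of J falls below the
         # "CostDecrementTolerance" parameter or "MaximumNumberOfSteps" is exceeded.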
1887     iOuter = 0
1888     J      = 1./mpr
1889     DeltaJ = 1./mpr
1890     Xr     = Xini.reshape((-1,1))
1891     while abs(DeltaJ) >= selfA._parameters["CostDecrementTolerance"] and iOuter <= selfA._parameters["MaximumNumberOfSteps"]:
1892         #
1893         # Inner Loop
1894         # ----------
1895         Ht = HO["Tangent"].asMatrix(Xr)
1896         Ht = Ht.reshape(Y.size,Xr.size) # ADAO & check shape
1897         #
1898         # Definition of the cost function
1899         # -------------------------------
1900         def CostFunction(dx):
1901             _dX  = numpy.asmatrix(numpy.ravel( dx )).T
1902             if selfA._parameters["StoreInternalVariables"] or \
1903                 selfA._toStore("CurrentState") or \
1904                 selfA._toStore("CurrentOptimum"):
1905                 selfA.StoredVariables["CurrentState"].store( Xb + _dX )
1906             _HdX = Ht * _dX
1907             _HdX = numpy.asmatrix(numpy.ravel( _HdX )).T
1908             _dInnovation = Innovation - _HdX
1909             if selfA._toStore("SimulatedObservationAtCurrentState") or \
1910                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1911                 selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb + _HdX )
1912             if selfA._toStore("InnovationAtCurrentState"):
1913                 selfA.StoredVariables["InnovationAtCurrentState"].store( _dInnovation )
1914             #
1915             Jb  = float( 0.5 * _dX.T * BI * _dX )
1916             Jo  = float( 0.5 * _dInnovation.T * RI * _dInnovation )
1917             J   = Jb + Jo
1918             #
1919             selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
1920             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1921             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1922             selfA.StoredVariables["CostFunctionJ" ].store( J )
1923             if selfA._toStore("IndexOfOptimum") or \
1924                 selfA._toStore("CurrentOptimum") or \
1925                 selfA._toStore("CostFunctionJAtCurrentOptimum") or \
1926                 selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
1927                 selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
1928                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1929                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1930             if selfA._toStore("IndexOfOptimum"):
1931                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1932             if selfA._toStore("CurrentOptimum"):
1933                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
1934             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1935                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
1936             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1937                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1938             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1939                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1940             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1941                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1942             return J
1943         #
1944         def GradientOfCostFunction(dx):
1945             _dX          = numpy.asmatrix(numpy.ravel( dx )).T
1946             _HdX         = Ht * _dX
1947             _HdX         = numpy.asmatrix(numpy.ravel( _HdX )).T
1948             _dInnovation = Innovation - _HdX
1949             GradJb       = BI * _dX
1950             GradJo       = - Ht.T @ (RI * _dInnovation)
1951             GradJ        = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
1952             return GradJ
1953         #
1954         # Minimization of the functional
1955         # ------------------------------
1956         nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
1957         #
1958         if selfA._parameters["Minimizer"] == "LBFGSB":
1959             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
1960             if "0.19" <= scipy.version.version <= "1.1.0":
1961                 import lbfgsbhlt as optimiseur
1962             else:
1963                 import scipy.optimize as optimiseur
1964             Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
1965                 func        = CostFunction,
1966                 x0          = numpy.zeros(Xini.size),
1967                 fprime      = GradientOfCostFunction,
1968                 args        = (),
1969                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
1970                 maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
1971                 factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
1972                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1973                 iprint      = selfA._parameters["optiprint"],
1974                 )
1975             nfeval = Informations['funcalls']
1976             rc     = Informations['warnflag']
1977         elif selfA._parameters["Minimizer"] == "TNC":
1978             Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
1979                 func        = CostFunction,
1980                 x0          = numpy.zeros(Xini.size),
1981                 fprime      = GradientOfCostFunction,
1982                 args        = (),
1983                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
1984                 maxfun      = selfA._parameters["MaximumNumberOfSteps"],
1985                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1986                 ftol        = selfA._parameters["CostDecrementTolerance"],
1987                 messages    = selfA._parameters["optmessages"],
1988                 )
1989         elif selfA._parameters["Minimizer"] == "CG":
1990             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
1991                 f           = CostFunction,
1992                 x0          = numpy.zeros(Xini.size),
1993                 fprime      = GradientOfCostFunction,
1994                 args        = (),
1995                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
1996                 gtol        = selfA._parameters["GradientNormTolerance"],
1997                 disp        = selfA._parameters["optdisp"],
1998                 full_output = True,
1999                 )
2000         elif selfA._parameters["Minimizer"] == "NCG":
2001             Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2002                 f           = CostFunction,
2003                 x0          = numpy.zeros(Xini.size),
2004                 fprime      = GradientOfCostFunction,
2005                 args        = (),
2006                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2007                 avextol     = selfA._parameters["CostDecrementTolerance"],
2008                 disp        = selfA._parameters["optdisp"],
2009                 full_output = True,
2010                 )
2011         elif selfA._parameters["Minimizer"] == "BFGS":
2012             Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2013                 f           = CostFunction,
2014                 x0          = numpy.zeros(Xini.size),
2015                 fprime      = GradientOfCostFunction,
2016                 args        = (),
2017                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2018                 gtol        = selfA._parameters["GradientNormTolerance"],
2019                 disp        = selfA._parameters["optdisp"],
2020                 full_output = True,
2021                 )
2022         else:
2023             raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2024         #
2025         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2026         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2027         #
2028         if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2029             Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2030             Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2031         else:
2032             Minimum = Xb + numpy.asmatrix(numpy.ravel( Minimum )).T
2033         #
2034         Xr     = Minimum
2035         DeltaJ = selfA.StoredVariables["CostFunctionJ" ][-1] - J
2036         iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
2037     #
2038     # Retrieval of the analysis
2039     # -------------------------
2040     Xa = Xr
2041     #
2042     selfA.StoredVariables["Analysis"].store( Xa )
2043     #
2044     if selfA._toStore("OMA") or \
2045         selfA._toStore("SigmaObs2") or \
2046         selfA._toStore("SimulationQuantiles") or \
2047         selfA._toStore("SimulatedObservationAtOptimum"):
2048         if selfA._toStore("SimulatedObservationAtCurrentState"):
2049             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2050         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2051             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2052         else:
2053             HXa = Hm( Xa )
2054     #
2055     # Computation of the analysis covariance
2056     # --------------------------------------
2057     if selfA._toStore("APosterioriCovariance") or \
2058         selfA._toStore("SimulationQuantiles") or \
2059         selfA._toStore("JacobianMatrixAtOptimum") or \
2060         selfA._toStore("KalmanGainAtOptimum"):
2061         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2062         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2063     if selfA._toStore("APosterioriCovariance") or \
2064         selfA._toStore("SimulationQuantiles") or \
2065         selfA._toStore("KalmanGainAtOptimum"):
2066         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2067         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2068     if selfA._toStore("APosterioriCovariance") or \
2069         selfA._toStore("SimulationQuantiles"):
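             # The a posteriori covariance is the inverse of the Hessian of the cost function,
             # A = (B^(-1) + H^T R^(-1) H)^(-1), built below by applying the Hessian to each
             # canonical basis vector, one column at a time.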
2070         HessienneI = []
2071         nb = Xa.size
2072         for i in range(nb):
2073             _ee    = numpy.matrix(numpy.zeros(nb)).T
2074             _ee[i] = 1.
2075             _HtEE  = numpy.dot(HtM,_ee)
2076             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2077             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2078         HessienneI = numpy.matrix( HessienneI )
2079         A = HessienneI.I
2080         if min(A.shape) != max(A.shape):
2081             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2082         if (numpy.diag(A) < 0).any():
2083             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2084         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
2085             try:
2086                 L = numpy.linalg.cholesky( A )
2087             except numpy.linalg.LinAlgError:
2088                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2089     if selfA._toStore("APosterioriCovariance"):
2090         selfA.StoredVariables["APosterioriCovariance"].store( A )
2091     if selfA._toStore("JacobianMatrixAtOptimum"):
2092         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2093     if selfA._toStore("KalmanGainAtOptimum"):
2094         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2095         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2096         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2097     #
2098     # Additional computations and/or storage
2099     # ---------------------------------------
2100     if selfA._toStore("Innovation") or \
2101         selfA._toStore("SigmaObs2") or \
2102         selfA._toStore("MahalanobisConsistency") or \
2103         selfA._toStore("OMB"):
2104         d  = Y - HXb
2105     if selfA._toStore("Innovation"):
2106         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2107     if selfA._toStore("BMA"):
2108         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2109     if selfA._toStore("OMA"):
2110         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2111     if selfA._toStore("OMB"):
2112         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2113     if selfA._toStore("SigmaObs2"):
2114         TraceR = R.trace(Y.size)
2115         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2116     if selfA._toStore("MahalanobisConsistency"):
2117         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2118     if selfA._toStore("SimulationQuantiles"):
2119         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2120     if selfA._toStore("SimulatedObservationAtBackground"):
2121         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2122     if selfA._toStore("SimulatedObservationAtOptimum"):
2123         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2124     #
2125     return 0
2126
2127 # ==============================================================================
2128 def mlef(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="MLEF13",
2129     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
2130     """
2131     Maximum Likelihood Ensemble Filter
2132     """
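         # Principle of the implementation below: the analysis is sought in the
         # ensemble space, x = xf + Xf' w, by iteratively minimising
         #     J(w) = 1/2 w^T w + 1/2 (y - H(x(w)))^T R^{-1} (y - H(x(w))),
         # the observation operator being implicitly relinearised around the
         # current iterate at each (Gauss-Newton like) update of w.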
2133     if selfA._parameters["EstimationOf"] == "Parameters":
2134         selfA._parameters["StoreInternalVariables"] = True
2135     #
2136     # Operators
2137     H = HO["Direct"].appliedControledFormTo
2138     #
2139     if selfA._parameters["EstimationOf"] == "State":
2140         M = EM["Direct"].appliedControledFormTo
2141     #
2142     if CM is not None and "Tangent" in CM and U is not None:
2143         Cm = CM["Tangent"].asMatrix(Xb)
2144     else:
2145         Cm = None
2146     #
2147     # Observation duration and sizes
2148     if hasattr(Y,"stepnumber"):
2149         duration = Y.stepnumber()
2150         __p = numpy.cumprod(Y.shape())[-1]
2151     else:
2152         duration = 2
2153         __p = numpy.array(Y).size
2154     #
2155     # Precompute the inverses of B and R
2156     if selfA._parameters["StoreInternalVariables"] \
2157         or selfA._toStore("CostFunctionJ") \
2158         or selfA._toStore("CostFunctionJb") \
2159         or selfA._toStore("CostFunctionJo") \
2160         or selfA._toStore("CurrentOptimum") \
2161         or selfA._toStore("APosterioriCovariance"):
2162         BI = B.getI()
2163     RI = R.getI()
2164     #
2165     __n = Xb.size
2166     __m = selfA._parameters["NumberOfMembers"]
2167     #
2168     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2169         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
2170         selfA.StoredVariables["Analysis"].store( Xb )
2171         if selfA._toStore("APosterioriCovariance"):
2172             if hasattr(B,"asfullmatrix"):
2173                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2174             else:
2175                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2176         selfA._setInternalState("seed", numpy.random.get_state())
2177     elif selfA._parameters["nextStep"]:
2178         Xn = selfA._getInternalState("Xn")
2179     #
2180     previousJMinimum = numpy.finfo(float).max
         nbPreviousSteps  = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Offset used below when locating the optimum of this run
2181     #
2182     for step in range(duration-1):
2183         numpy.random.set_state(selfA._getInternalState("seed"))
2184         if hasattr(Y,"store"):
2185             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2186         else:
2187             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2188         #
2189         if U is not None:
2190             if hasattr(U,"store") and len(U)>1:
2191                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2192             elif hasattr(U,"store") and len(U)==1:
2193                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2194             else:
2195                 Un = numpy.asmatrix(numpy.ravel( U )).T
2196         else:
2197             Un = None
2198         #
2199         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2200             Xn = CovarianceInflation( Xn,
2201                 selfA._parameters["InflationType"],
2202                 selfA._parameters["InflationFactor"],
2203                 )
2204         #
2205         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
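                 # Each member is propagated by the evolution model M (with the optional
                 # control Un); model error of covariance Q is then added to the members
                 # (cf. EnsemblePerturbationWithGivenCovariance) to form Xn_predicted.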
2206             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2207                 argsAsSerie = True,
2208                 returnSerieAsArrayMatrix = True )
2209             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2210             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
2211                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2212                 Xn_predicted = Xn_predicted + Cm * Un
2213         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2214             # --- > By design, M = Id, Q = 0
2215             Xn_predicted = EMX = Xn
2216         #
2217         #--------------------------
2218         if VariantM == "MLEF13":
2219             Xfm = numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))
2220             EaX = EnsembleOfAnomalies( Xn_predicted, Xfm, 1./math.sqrt(__m-1) )
2221             Ua  = numpy.identity(__m)
2222             __j = 0
2223             Deltaw = 1
2224             if not BnotT:
2225                 Ta  = numpy.identity(__m)
2226             vw  = numpy.zeros(__m)
2227             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2228                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2229                 #
2230                 if BnotT:
2231                     E1 = vx1 + _epsilon * EaX
2232                 else:
2233                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2234                 #
2235                 HE2 = H( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2236                     argsAsSerie = True,
2237                     returnSerieAsArrayMatrix = True )
2238                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2239                 #
2240                 if BnotT:
2241                     EaY = (HE2 - vy2) / _epsilon
2242                 else:
2243                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2244                 #
2245                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy2 )))
2246                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2247                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2248                 #
2249                 vw = vw + Deltaw
2250                 #
2251                 if not BnotT:
2252                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2253                 #
2254                 __j = __j + 1
2255             #
2256             if BnotT:
2257                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2258             #
2259             Xn = vx1 + math.sqrt(__m-1) * EaX @ Ta @ Ua
2260         #--------------------------
2261         else:
2262             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2263         #
2264         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2265             Xn = CovarianceInflation( Xn,
2266                 selfA._parameters["InflationType"],
2267                 selfA._parameters["InflationFactor"],
2268                 )
2269         #
2270         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2271         #--------------------------
2272         selfA._setInternalState("Xn", Xn)
2273         selfA._setInternalState("seed", numpy.random.get_state())
2274         #--------------------------
2275         #
2276         if selfA._parameters["StoreInternalVariables"] \
2277             or selfA._toStore("CostFunctionJ") \
2278             or selfA._toStore("CostFunctionJb") \
2279             or selfA._toStore("CostFunctionJo") \
2280             or selfA._toStore("APosterioriCovariance") \
2281             or selfA._toStore("InnovationAtCurrentAnalysis") \
2282             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2283             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2284             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2285             _Innovation = Ynpu - _HXa
2286         #
2287         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2288         # ---> with analysis
2289         selfA.StoredVariables["Analysis"].store( Xa )
2290         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2291             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2292         if selfA._toStore("InnovationAtCurrentAnalysis"):
2293             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2294         # ---> with current state
2295         if selfA._parameters["StoreInternalVariables"] \
2296             or selfA._toStore("CurrentState"):
2297             selfA.StoredVariables["CurrentState"].store( Xn )
2298         if selfA._toStore("ForecastState"):
2299             selfA.StoredVariables["ForecastState"].store( EMX )
2300         if selfA._toStore("ForecastCovariance"):
2301             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2302         if selfA._toStore("BMA"):
2303             selfA.StoredVariables["BMA"].store( EMX - Xa )
2304         if selfA._toStore("InnovationAtCurrentState"):
2305             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2306         if selfA._toStore("SimulatedObservationAtCurrentState") \
2307             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2308             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2309         # ---> others
2310         if selfA._parameters["StoreInternalVariables"] \
2311             or selfA._toStore("CostFunctionJ") \
2312             or selfA._toStore("CostFunctionJb") \
2313             or selfA._toStore("CostFunctionJo") \
2314             or selfA._toStore("CurrentOptimum") \
2315             or selfA._toStore("APosterioriCovariance"):
2316             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2317             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2318             J   = Jb + Jo
2319             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2320             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2321             selfA.StoredVariables["CostFunctionJ" ].store( J )
2322             #
2323             if selfA._toStore("IndexOfOptimum") \
2324                 or selfA._toStore("CurrentOptimum") \
2325                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2326                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2327                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2328                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2329                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2330             if selfA._toStore("IndexOfOptimum"):
2331                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2332             if selfA._toStore("CurrentOptimum"):
2333                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2334             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2335                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2336             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2337                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2338             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2339                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2340             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2341                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2342         if selfA._toStore("APosterioriCovariance"):
2343             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2344         if selfA._parameters["EstimationOf"] == "Parameters" \
2345             and J < previousJMinimum:
2346             previousJMinimum    = J
2347             XaMin               = Xa
2348             if selfA._toStore("APosterioriCovariance"):
2349                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2350         # ---> For the smoothers
2351         if selfA._toStore("CurrentEnsembleState"):
2352             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2353     #
2354     # Additional final storage of the optimum in parameter estimation
2355     # ----------------------------------------------------------------------
2356     if selfA._parameters["EstimationOf"] == "Parameters":
2357         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2358         selfA.StoredVariables["Analysis"].store( XaMin )
2359         if selfA._toStore("APosterioriCovariance"):
2360             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2361         if selfA._toStore("BMA"):
2362             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2363     #
2364     return 0
2365
2366 # ==============================================================================
2367 def mmqr(
2368         func     = None,
2369         x0       = None,
2370         fprime   = None,
2371         bounds   = None,
2372         quantile = 0.5,
2373         maxfun   = 15000,
2374         toler    = 1.e-06,
2375         y        = None,
2376         ):
2377     """
2378     Computer implementation of the MMQR algorithm, based on the publication:
2379     David R. Hunter, Kenneth Lange, "Quantile Regression via an MM Algorithm",
2380     Journal of Computational and Graphical Statistics, 9, 1, pp.60-77, 2000.
2381     """
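         # Principle (MM = Majorize-Minimize): the non-smooth quantile regression loss
         #     sum_i rho_q(r_i),  r_i = y_i - f_i(x),  rho_q(r) = r*(q - 1_{r<0}),
         # is majorized at the current iterate by a weighted least-squares surrogate
         # with weights 1/(epsilon + |r_i|); each iteration minimises this surrogate
         # (Newton-like step below) and halves the step until the surrogate decreases.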
2382     #
2383     # Retrieval of the initial data and information
2384     # --------------------------------------------------
2385     variables = numpy.ravel( x0 )
2386     mesures   = numpy.ravel( y )
2387     increment = sys.float_info.max
2388     p         = variables.size
2389     n         = mesures.size
2390     quantile  = float(quantile)
2391     #
2392     # Computation of the MM parameters
2393     # ---------------------------
2394     tn      = float(toler) / n
2395     e0      = -tn / math.log(tn)
2396     epsilon = (e0-tn)/(1+math.log(e0))
2397     #
2398     # Initialization computations
2399     # ------------------------
2400     residus  = mesures - numpy.ravel( func( variables ) )
2401     poids    = 1./(epsilon+numpy.abs(residus))
2402     veps     = 1. - 2. * quantile - residus * poids
2403     lastsurrogate = -numpy.sum(residus*veps) - (1.-2.*quantile)*numpy.sum(residus)
2404     iteration = 0
2405     #
2406     # Iterative search
2407     # -------------------
2408     while (increment > toler) and (iteration < maxfun) :
2409         iteration += 1
2410         #
2411         Derivees  = numpy.array(fprime(variables))
2412         Derivees  = Derivees.reshape(n,p) # Necessary to restore the matrix shape if it has gone through YACS pipes
2413         DeriveesT = Derivees.transpose()
2414         M         =   numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) )
2415         SM        =   numpy.transpose(numpy.dot( DeriveesT , veps ))
2416         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
2417         #
2418         variables = variables + step
2419         if bounds is not None:
2420             # Warning: avoid an infinite loop if an interval is too small
2421             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
2422                 step      = step/2.
2423                 variables = variables - step
2424         residus   = mesures - numpy.ravel( func(variables) )
2425         surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2426         #
2427         while ( (surrogate > lastsurrogate) and ( max(list(numpy.abs(step))) > 1.e-16 ) ) :
2428             step      = step/2.
2429             variables = variables - step
2430             residus   = mesures - numpy.ravel( func(variables) )
2431             surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2432         #
2433         increment     = lastsurrogate-surrogate
2434         poids         = 1./(epsilon+numpy.abs(residus))
2435         veps          = 1. - 2. * quantile - residus * poids
2436         lastsurrogate = -numpy.sum(residus * veps) - (1.-2.*quantile)*numpy.sum(residus)
2437     #
2438     # Discrepancy measure
2439     # --------------
2440     Ecart = quantile * numpy.sum(residus) - numpy.sum( residus[residus<0] )
2441     #
2442     return variables, Ecart, [n,p,iteration,increment,0]
2443
2444 # ==============================================================================
2445 def multi3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
2446     """
2447     Multi-step and multi-method 3DVAR
2448     """
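         # Driver for the multi-step use of a single-step assimilation: at each
         # observation step, the background is the model forecast of the previous
         # analysis (state estimation) or the previous analysis itself (parameter
         # estimation), and the elementary assimilation is delegated to "oneCycle"
         # (one of the single-step 3DVAR functions, e.g. std3dvar or psas3dvar).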
2449     #
2450     # Initialization
2451     if selfA._parameters["EstimationOf"] == "State":
2452         M = EM["Direct"].appliedTo
2453         #
2454         if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2455             Xn = numpy.ravel(Xb).reshape((-1,1))
2456             selfA.StoredVariables["Analysis"].store( Xn )
2457             if selfA._toStore("APosterioriCovariance"):
2458                 if hasattr(B,"asfullmatrix"):
2459                     selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
2460                 else:
2461                     selfA.StoredVariables["APosterioriCovariance"].store( B )
2462             if selfA._toStore("ForecastState"):
2463                 selfA.StoredVariables["ForecastState"].store( Xn )
2464         elif selfA._parameters["nextStep"]:
2465             Xn = selfA._getInternalState("Xn")
2466     else:
2467         Xn = numpy.ravel(Xb).reshape((-1,1))
2468     #
2469     if hasattr(Y,"stepnumber"):
2470         duration = Y.stepnumber()
2471     else:
2472         duration = 2
2473     #
2474     # Multi-step
2475     for step in range(duration-1):
2476         if hasattr(Y,"store"):
2477             Ynpu = numpy.ravel( Y[step+1] ).reshape((-1,1))
2478         else:
2479             Ynpu = numpy.ravel( Y ).reshape((-1,1))
2480         #
2481         if selfA._parameters["EstimationOf"] == "State": # Forecast
2482             Xn_predicted = M( Xn )
2483             if selfA._toStore("ForecastState"):
2484                 selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2485         elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
2486             # --- > By design, M = Id, Q = 0
2487             Xn_predicted = Xn
2488         Xn_predicted = numpy.ravel(Xn_predicted).reshape((-1,1))
2489         #
2490         oneCycle(selfA, Xn_predicted, Ynpu, U, HO, None, None, R, B, None)
2491         #
2492         Xn = selfA.StoredVariables["Analysis"][-1]
2493         #--------------------------
2494         selfA._setInternalState("Xn", Xn)
2495     #
2496     return 0
2497
2498 # ==============================================================================
2499 def psas3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2500     """
2501     3DVAR PSAS
2502     """
2503     #
2504     # Initializations
2505     # ---------------
2506     #
2507     # Operators
2508     Hm = HO["Direct"].appliedTo
2509     #
2510     # Optional use of a precomputed H(Xb) vector
2511     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2512         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2513     else:
2514         HXb = Hm( Xb )
2515     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2516     if Y.size != HXb.size:
2517         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
2518     if max(Y.shape) != max(HXb.shape):
2519         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
2520     #
2521     if selfA._toStore("JacobianMatrixAtBackground"):
2522         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2523         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2524         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2525     #
2526     Ht = HO["Tangent"].asMatrix(Xb)
2527     BHT = B * Ht.T
2528     HBHTpR = R + Ht * BHT
2529     Innovation = Y - HXb
2530     #
2531     # Starting point of the optimization
2532     Xini = numpy.zeros(Xb.shape)
2533     #
2534     # Definition of the cost function
2535     # ------------------------------
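         # PSAS dual formulation: for a linearised observation operator, the
         # minimisation is carried out on the variable w in observation space, with
         # state x = xb + B H^T w and cost
         #     J(w) = 1/2 w^T (H B H^T + R) w - w^T (y - H(xb)),
         # whose minimiser yields the same analysis as the primal 3DVAR problem.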
2536     def CostFunction(w):
2537         _W = numpy.asmatrix(numpy.ravel( w )).T
2538         if selfA._parameters["StoreInternalVariables"] or \
2539             selfA._toStore("CurrentState") or \
2540             selfA._toStore("CurrentOptimum"):
2541             selfA.StoredVariables["CurrentState"].store( Xb + BHT * _W )
2542         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2543             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2544             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm( Xb + BHT * _W ) )
2545         if selfA._toStore("InnovationAtCurrentState"):
2546             selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
2547         #
2548         Jb  = float( 0.5 * _W.T * HBHTpR * _W )
2549         Jo  = float( - _W.T * Innovation )
2550         J   = Jb + Jo
2551         #
2552         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2553         selfA.StoredVariables["CostFunctionJb"].store( Jb )
2554         selfA.StoredVariables["CostFunctionJo"].store( Jo )
2555         selfA.StoredVariables["CostFunctionJ" ].store( J )
2556         if selfA._toStore("IndexOfOptimum") or \
2557             selfA._toStore("CurrentOptimum") or \
2558             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2559             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2560             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2561             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2562             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2563         if selfA._toStore("IndexOfOptimum"):
2564             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2565         if selfA._toStore("CurrentOptimum"):
2566             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2567         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2568             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2569         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2570             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2571         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2572             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2573         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2574             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2575         return J
2576     #
2577     def GradientOfCostFunction(w):
2578         _W = numpy.asmatrix(numpy.ravel( w )).T
2579         GradJb  = HBHTpR * _W
2580         GradJo  = - Innovation
2581         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2582         return GradJ
2583     #
2584     # Minimization of the functional
2585     # --------------------------------
2586     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2587     #
2588     if selfA._parameters["Minimizer"] == "LBFGSB":
2589         if "0.19" <= scipy.version.version <= "1.1.0":
2590             import lbfgsbhlt as optimiseur
2591         else:
2592             import scipy.optimize as optimiseur
2593         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2594             func        = CostFunction,
2595             x0          = Xini,
2596             fprime      = GradientOfCostFunction,
2597             args        = (),
2598             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2599             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2600             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2601             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2602             iprint      = selfA._parameters["optiprint"],
2603             )
2604         nfeval = Informations['funcalls']
2605         rc     = Informations['warnflag']
2606     elif selfA._parameters["Minimizer"] == "TNC":
2607         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2608             func        = CostFunction,
2609             x0          = Xini,
2610             fprime      = GradientOfCostFunction,
2611             args        = (),
2612             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2613             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2614             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2615             ftol        = selfA._parameters["CostDecrementTolerance"],
2616             messages    = selfA._parameters["optmessages"],
2617             )
2618     elif selfA._parameters["Minimizer"] == "CG":
2619         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2620             f           = CostFunction,
2621             x0          = Xini,
2622             fprime      = GradientOfCostFunction,
2623             args        = (),
2624             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2625             gtol        = selfA._parameters["GradientNormTolerance"],
2626             disp        = selfA._parameters["optdisp"],
2627             full_output = True,
2628             )
2629     elif selfA._parameters["Minimizer"] == "NCG":
2630         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2631             f           = CostFunction,
2632             x0          = Xini,
2633             fprime      = GradientOfCostFunction,
2634             args        = (),
2635             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2636             avextol     = selfA._parameters["CostDecrementTolerance"],
2637             disp        = selfA._parameters["optdisp"],
2638             full_output = True,
2639             )
2640     elif selfA._parameters["Minimizer"] == "BFGS":
2641         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2642             f           = CostFunction,
2643             x0          = Xini,
2644             fprime      = GradientOfCostFunction,
2645             args        = (),
2646             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2647             gtol        = selfA._parameters["GradientNormTolerance"],
2648             disp        = selfA._parameters["optdisp"],
2649             full_output = True,
2650             )
2651     else:
2652         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2653     #
2654     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2655     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2656     #
2657     # Correction to work around a TNC bug on the returned Minimum
2658     # ----------------------------------------------------------------
2659     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2660         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2661         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2662     else:
2663         Minimum = Xb + BHT * numpy.asmatrix(numpy.ravel( Minimum )).T
2664     #
2665     # Obtaining the analysis
2666     # ----------------------
2667     Xa = Minimum
2668     #
2669     selfA.StoredVariables["Analysis"].store( Xa )
2670     #
2671     if selfA._toStore("OMA") or \
2672         selfA._toStore("SigmaObs2") or \
2673         selfA._toStore("SimulationQuantiles") or \
2674         selfA._toStore("SimulatedObservationAtOptimum"):
2675         if selfA._toStore("SimulatedObservationAtCurrentState"):
2676             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2677         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2678             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2679         else:
2680             HXa = Hm( Xa )
2681     #
2682     # Computation of the analysis covariance
2683     # ---------------------------------
2684     if selfA._toStore("APosterioriCovariance") or \
2685         selfA._toStore("SimulationQuantiles") or \
2686         selfA._toStore("JacobianMatrixAtOptimum") or \
2687         selfA._toStore("KalmanGainAtOptimum"):
2688         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2689         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2690     if selfA._toStore("APosterioriCovariance") or \
2691         selfA._toStore("SimulationQuantiles") or \
2692         selfA._toStore("KalmanGainAtOptimum"):
2693         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2694         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2695     if selfA._toStore("APosterioriCovariance") or \
2696         selfA._toStore("SimulationQuantiles"):
2697         BI = B.getI()
2698         RI = R.getI()
2699         HessienneI = []
2700         nb = Xa.size
2701         for i in range(nb):
2702             _ee    = numpy.matrix(numpy.zeros(nb)).T
2703             _ee[i] = 1.
2704             _HtEE  = numpy.dot(HtM,_ee)
2705             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2706             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2707         HessienneI = numpy.matrix( HessienneI )
2708         A = HessienneI.I
2709         if min(A.shape) != max(A.shape):
2710             raise ValueError("The %s a posteriori covariance matrix A has shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2711         if (numpy.diag(A) < 0).any():
2712             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2713         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
2714             try:
2715                 L = numpy.linalg.cholesky( A )
2716             except numpy.linalg.LinAlgError:
2717                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2718     if selfA._toStore("APosterioriCovariance"):
2719         selfA.StoredVariables["APosterioriCovariance"].store( A )
2720     if selfA._toStore("JacobianMatrixAtOptimum"):
2721         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2722     if selfA._toStore("KalmanGainAtOptimum"):
2723         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2724         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2725         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2726     #
2727     # Additional calculations and/or storage
2728     # ---------------------------------------
2729     if selfA._toStore("Innovation") or \
2730         selfA._toStore("SigmaObs2") or \
2731         selfA._toStore("MahalanobisConsistency") or \
2732         selfA._toStore("OMB"):
2733         d  = Y - HXb
2734     if selfA._toStore("Innovation"):
2735         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2736     if selfA._toStore("BMA"):
2737         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2738     if selfA._toStore("OMA"):
2739         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2740     if selfA._toStore("OMB"):
2741         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2742     if selfA._toStore("SigmaObs2"):
2743         TraceR = R.trace(Y.size)
2744         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2745     if selfA._toStore("MahalanobisConsistency"):
2746         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2747     if selfA._toStore("SimulationQuantiles"):
2748         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2749     if selfA._toStore("SimulatedObservationAtBackground"):
2750         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2751     if selfA._toStore("SimulatedObservationAtOptimum"):
2752         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2753     #
2754     return 0
2755
2756 # ==============================================================================
2757 def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"):
2758     """
2759     Stochastic EnKF
2760     """
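         # Principle: a stochastic (perturbed observations) ensemble Kalman filter.
         # Each member is updated with its own perturbed observation, using a gain
         # estimated from the forecast ensemble; two equivalent constructions of this
         # gain are provided below (VariantM "KalmanFilterFormula05" and "...16").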
2761     if selfA._parameters["EstimationOf"] == "Parameters":
2762         selfA._parameters["StoreInternalVariables"] = True
2763     #
2764     # Operators
2765     H = HO["Direct"].appliedControledFormTo
2766     #
2767     if selfA._parameters["EstimationOf"] == "State":
2768         M = EM["Direct"].appliedControledFormTo
2769     #
2770     if CM is not None and "Tangent" in CM and U is not None:
2771         Cm = CM["Tangent"].asMatrix(Xb)
2772     else:
2773         Cm = None
2774     #
2775     # Observation duration and sizes
2776     if hasattr(Y,"stepnumber"):
2777         duration = Y.stepnumber()
2778         __p = numpy.cumprod(Y.shape())[-1]
2779     else:
2780         duration = 2
2781         __p = numpy.array(Y).size
2782     #
2783     # Precompute the inverses of B and R
2784     if selfA._parameters["StoreInternalVariables"] \
2785         or selfA._toStore("CostFunctionJ") \
2786         or selfA._toStore("CostFunctionJb") \
2787         or selfA._toStore("CostFunctionJo") \
2788         or selfA._toStore("CurrentOptimum") \
2789         or selfA._toStore("APosterioriCovariance"):
2790         BI = B.getI()
2791         RI = R.getI()
2792     #
2793     __n = Xb.size
2794     __m = selfA._parameters["NumberOfMembers"]
2795     #
2796     if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
2797     else:                         Rn = R
2798     #
2799     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2800         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
2801         else:                         Pn = B
2802         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
2803         selfA.StoredVariables["Analysis"].store( Xb )
2804         if selfA._toStore("APosterioriCovariance"):
2805             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
2806         selfA._setInternalState("seed", numpy.random.get_state())
2807     elif selfA._parameters["nextStep"]:
2808         Xn = selfA._getInternalState("Xn")
2809     #
2810     previousJMinimum = numpy.finfo(float).max
         nbPreviousSteps  = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Offset used below when locating the optimum of this run
2811     #
2812     for step in range(duration-1):
2813         numpy.random.set_state(selfA._getInternalState("seed"))
2814         if hasattr(Y,"store"):
2815             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2816         else:
2817             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2818         #
2819         if U is not None:
2820             if hasattr(U,"store") and len(U)>1:
2821                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2822             elif hasattr(U,"store") and len(U)==1:
2823                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2824             else:
2825                 Un = numpy.asmatrix(numpy.ravel( U )).T
2826         else:
2827             Un = None
2828         #
2829         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2830             Xn = CovarianceInflation( Xn,
2831                 selfA._parameters["InflationType"],
2832                 selfA._parameters["InflationFactor"],
2833                 )
2834         #
2835         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2836             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2837                 argsAsSerie = True,
2838                 returnSerieAsArrayMatrix = True )
2839             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2840             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2841                 argsAsSerie = True,
2842                 returnSerieAsArrayMatrix = True )
2843             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
2844                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2845                 Xn_predicted = Xn_predicted + Cm * Un
2846         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2847             # --- > By design, M = Id, Q = 0
2848             Xn_predicted = EMX = Xn
2849             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2850                 argsAsSerie = True,
2851                 returnSerieAsArrayMatrix = True )
2852         #
2853         # Mean of forecast and observation of forecast
2854         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2855         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2856         #
2857         #--------------------------
2858         if VariantM == "KalmanFilterFormula05":
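                 # Gain built from the sample cross-covariances of the forecast ensemble,
                 # K = Cov(x,Hx) [Cov(Hx,Hx) + R]^{-1}; each member is then updated with
                 # an independent observation perturbation ri ~ N(0,R).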
2859             PfHT, HPfHT = 0., 0.
2860             for i in range(__m):
2861                 Exfi = Xn_predicted[:,i].reshape((__n,1)) - Xfm
2862                 Eyfi = HX_predicted[:,i].reshape((__p,1)) - Hfm
2863                 PfHT  += Exfi * Eyfi.T
2864                 HPfHT += Eyfi * Eyfi.T
2865             PfHT  = (1./(__m-1)) * PfHT
2866             HPfHT = (1./(__m-1)) * HPfHT
2867             Kn     = PfHT * ( R + HPfHT ).I
2868             del PfHT, HPfHT
2869             #
2870             for i in range(__m):
2871                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
2872                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
2873         #--------------------------
2874         elif VariantM == "KalmanFilterFormula16":
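                 # Gain built directly from the normalised anomalies, the observation
                 # perturbation ensemble EpY playing the role of R:
                 # K = Ea_x Ea_y^T (Ea_y Ea_y^T)^{-1}, with Ea_y including -(EpY - EpYm).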
2875             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
2876             EpYm  = EpY.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2877             #
2878             EaX   = EnsembleOfAnomalies( Xn_predicted ) / math.sqrt(__m-1)
2879             EaY = (HX_predicted - Hfm - EpY + EpYm) / math.sqrt(__m-1)
2880             #
2881             Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
2882             #
2883             for i in range(__m):
2884                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
2885         #--------------------------
2886         else:
2887             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2888         #
2889         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2890             Xn = CovarianceInflation( Xn,
2891                 selfA._parameters["InflationType"],
2892                 selfA._parameters["InflationFactor"],
2893                 )
2894         #
2895         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2896         #--------------------------
2897         selfA._setInternalState("Xn", Xn)
2898         selfA._setInternalState("seed", numpy.random.get_state())
2899         #--------------------------
2900         #
2901         if selfA._parameters["StoreInternalVariables"] \
2902             or selfA._toStore("CostFunctionJ") \
2903             or selfA._toStore("CostFunctionJb") \
2904             or selfA._toStore("CostFunctionJo") \
2905             or selfA._toStore("APosterioriCovariance") \
2906             or selfA._toStore("InnovationAtCurrentAnalysis") \
2907             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2908             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2909             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2910             _Innovation = Ynpu - _HXa
2911         #
2912         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2913         # ---> with analysis
2914         selfA.StoredVariables["Analysis"].store( Xa )
2915         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2916             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2917         if selfA._toStore("InnovationAtCurrentAnalysis"):
2918             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2919         # ---> with current state
2920         if selfA._parameters["StoreInternalVariables"] \
2921             or selfA._toStore("CurrentState"):
2922             selfA.StoredVariables["CurrentState"].store( Xn )
2923         if selfA._toStore("ForecastState"):
2924             selfA.StoredVariables["ForecastState"].store( EMX )
2925         if selfA._toStore("ForecastCovariance"):
2926             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2927         if selfA._toStore("BMA"):
2928             selfA.StoredVariables["BMA"].store( EMX - Xa )
2929         if selfA._toStore("InnovationAtCurrentState"):
2930             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
2931         if selfA._toStore("SimulatedObservationAtCurrentState") \
2932             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2933             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
2934         # ---> others
2935         if selfA._parameters["StoreInternalVariables"] \
2936             or selfA._toStore("CostFunctionJ") \
2937             or selfA._toStore("CostFunctionJb") \
2938             or selfA._toStore("CostFunctionJo") \
2939             or selfA._toStore("CurrentOptimum") \
2940             or selfA._toStore("APosterioriCovariance"):
2941             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2942             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2943             J   = Jb + Jo
2944             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2945             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2946             selfA.StoredVariables["CostFunctionJ" ].store( J )
2947             #
2948             if selfA._toStore("IndexOfOptimum") \
2949                 or selfA._toStore("CurrentOptimum") \
2950                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2951                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2952                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2953                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2954                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2955             if selfA._toStore("IndexOfOptimum"):
2956                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2957             if selfA._toStore("CurrentOptimum"):
2958                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2959             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2960                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2961             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2962                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2963             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2964                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2965             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2966                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2967         if selfA._toStore("APosterioriCovariance"):
2968             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2969         if selfA._parameters["EstimationOf"] == "Parameters" \
2970             and J < previousJMinimum:
2971             previousJMinimum    = J
2972             XaMin               = Xa
2973             if selfA._toStore("APosterioriCovariance"):
2974                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2975         # ---> For the smoothers
2976         if selfA._toStore("CurrentEnsembleState"):
2977             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2978     #
2979     # Additional final storage of the optimum in parameter estimation
2980     # ----------------------------------------------------------------------
2981     if selfA._parameters["EstimationOf"] == "Parameters":
2982         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2983         selfA.StoredVariables["Analysis"].store( XaMin )
2984         if selfA._toStore("APosterioriCovariance"):
2985             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2986         if selfA._toStore("BMA"):
2987             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2988     #
2989     return 0
2990
2991 # ==============================================================================
2992 def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2993     """
2994     3DVAR
2995     """
2996     #
2997     # Initializations
2998     # ---------------
2999     #
3000     # Operators
3001     Hm = HO["Direct"].appliedTo
3002     Ha = HO["Adjoint"].appliedInXTo
3003     #
3004     # Optional use of a precomputed H(Xb) vector
3005     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
3006         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
3007     else:
3008         HXb = Hm( Xb )
3009     HXb = HXb.reshape((-1,1))
3010     if Y.size != HXb.size:
3011         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
3012     if max(Y.shape) != max(HXb.shape):
3013         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
3014     #
3015     if selfA._toStore("JacobianMatrixAtBackground"):
3016         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
3017         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
3018         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
3019     #
3020     # Precompute the inverses of B and R
3021     BI = B.getI()
3022     RI = R.getI()
3023     #
3024     # Starting point of the optimization
3025     Xini = selfA._parameters["InitializationPoint"]
3026     #
3027     # Definition of the cost function
3028     # ------------------------------
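         # Standard (primal) 3DVAR cost function and its exact gradient:
         #     J(x)  = 1/2 (x - xb)^T B^{-1} (x - xb) + 1/2 (y - H(x))^T R^{-1} (y - H(x))
         #     gradJ = B^{-1} (x - xb) - H'^T R^{-1} (y - H(x))
         # where H' is the tangent of H and H'^T its adjoint (Ha below).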
3029     def CostFunction(x):
3030         _X  = numpy.ravel( x ).reshape((-1,1))
3031         if selfA._parameters["StoreInternalVariables"] or \
3032             selfA._toStore("CurrentState") or \
3033             selfA._toStore("CurrentOptimum"):
3034             selfA.StoredVariables["CurrentState"].store( _X )
3035         _HX = Hm( _X ).reshape((-1,1))
3036         _Innovation = Y - _HX
3037         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3038             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3039             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3040         if selfA._toStore("InnovationAtCurrentState"):
3041             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3042         #
3043         Jb  = float( 0.5 * (_X - Xb).T * (BI * (_X - Xb)) )
3044         Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
3045         J   = Jb + Jo
3046         #
3047         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3048         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3049         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3050         selfA.StoredVariables["CostFunctionJ" ].store( J )
3051         if selfA._toStore("IndexOfOptimum") or \
3052             selfA._toStore("CurrentOptimum") or \
3053             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3054             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3055             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3056             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3057             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3058         if selfA._toStore("IndexOfOptimum"):
3059             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3060         if selfA._toStore("CurrentOptimum"):
3061             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3062         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3063             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3064         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3065             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3066         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3067             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3068         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3069             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3070         return J
3071     #
3072     def GradientOfCostFunction(x):
3073         _X      = numpy.ravel( x ).reshape((-1,1))
3074         _HX     = Hm( _X )
3075         _HX     = numpy.ravel( _HX ).reshape((-1,1))
3076         GradJb  = BI * (_X - Xb)
3077         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3078         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3079         return GradJ
3080     #
3081     # Minimization of the functional
3082     # --------------------------------
3083     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3084     #
3085     if selfA._parameters["Minimizer"] == "LBFGSB":
3086         if "0.19" <= scipy.version.version <= "1.1.0":
3087             import lbfgsbhlt as optimiseur
3088         else:
3089             import scipy.optimize as optimiseur
3090         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3091             func        = CostFunction,
3092             x0          = Xini,
3093             fprime      = GradientOfCostFunction,
3094             args        = (),
3095             bounds      = selfA._parameters["Bounds"],
3096             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3097             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3098             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3099             iprint      = selfA._parameters["optiprint"],
3100             )
3101         nfeval = Informations['funcalls']
3102         rc     = Informations['warnflag']
3103     elif selfA._parameters["Minimizer"] == "TNC":
3104         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3105             func        = CostFunction,
3106             x0          = Xini,
3107             fprime      = GradientOfCostFunction,
3108             args        = (),
3109             bounds      = selfA._parameters["Bounds"],
3110             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3111             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3112             ftol        = selfA._parameters["CostDecrementTolerance"],
3113             messages    = selfA._parameters["optmessages"],
3114             )
3115     elif selfA._parameters["Minimizer"] == "CG":
3116         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3117             f           = CostFunction,
3118             x0          = Xini,
3119             fprime      = GradientOfCostFunction,
3120             args        = (),
3121             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3122             gtol        = selfA._parameters["GradientNormTolerance"],
3123             disp        = selfA._parameters["optdisp"],
3124             full_output = True,
3125             )
3126     elif selfA._parameters["Minimizer"] == "NCG":
3127         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3128             f           = CostFunction,
3129             x0          = Xini,
3130             fprime      = GradientOfCostFunction,
3131             args        = (),
3132             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3133             avextol     = selfA._parameters["CostDecrementTolerance"],
3134             disp        = selfA._parameters["optdisp"],
3135             full_output = True,
3136             )
3137     elif selfA._parameters["Minimizer"] == "BFGS":
3138         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3139             f           = CostFunction,
3140             x0          = Xini,
3141             fprime      = GradientOfCostFunction,
3142             args        = (),
3143             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3144             gtol        = selfA._parameters["GradientNormTolerance"],
3145             disp        = selfA._parameters["optdisp"],
3146             full_output = True,
3147             )
3148     else:
3149         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3150     #
3151     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3152     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3153     #
3154     # Correction to work around a TNC bug on the returned Minimum
3155     # ----------------------------------------------------------------
3156     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3157         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3158     #
3159     Xa = Minimum
3160     #--------------------------
3161     #
3162     selfA.StoredVariables["Analysis"].store( Xa )
3163     #
3164     if selfA._toStore("OMA") or \
3165         selfA._toStore("SigmaObs2") or \
3166         selfA._toStore("SimulationQuantiles") or \
3167         selfA._toStore("SimulatedObservationAtOptimum"):
3168         if selfA._toStore("SimulatedObservationAtCurrentState"):
3169             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3170         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3171             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3172         else:
3173             HXa = Hm( Xa )
3174     #
3175     if selfA._toStore("APosterioriCovariance") or \
3176         selfA._toStore("SimulationQuantiles") or \
3177         selfA._toStore("JacobianMatrixAtOptimum") or \
3178         selfA._toStore("KalmanGainAtOptimum"):
3179         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3180         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3181     if selfA._toStore("APosterioriCovariance") or \
3182         selfA._toStore("SimulationQuantiles") or \
3183         selfA._toStore("KalmanGainAtOptimum"):
3184         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3185         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
3186     if selfA._toStore("APosterioriCovariance") or \
3187         selfA._toStore("SimulationQuantiles"):
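        # The a posteriori covariance A is approximated by the inverse of the
        # cost function Hessian, A = (B^-1 + H^T R^-1 H)^-1, assembled column
        # by column by applying the Hessian to each canonical basis vector.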
3188         HessienneI = []
3189         nb = Xa.size
3190         for i in range(nb):
3191             _ee    = numpy.zeros(nb)
3192             _ee[i] = 1.
3193             _HtEE  = numpy.dot(HtM,_ee)
3194             HessienneI.append( numpy.ravel( BI * _ee.reshape((-1,1)) + HaM * (RI * _HtEE.reshape((-1,1))) ) )
3195         A = numpy.linalg.inv(numpy.array( HessienneI ))
3196         if min(A.shape) != max(A.shape):
3197             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
3198         if (numpy.diag(A) < 0).any():
3199             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
3200         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
3201             try:
3202                 L = numpy.linalg.cholesky( A )
3203             except numpy.linalg.LinAlgError:
3204                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
3205     if selfA._toStore("APosterioriCovariance"):
3206         selfA.StoredVariables["APosterioriCovariance"].store( A )
3207     if selfA._toStore("JacobianMatrixAtOptimum"):
3208         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3209     if selfA._toStore("KalmanGainAtOptimum"):
3210         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3211         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3212         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3213     #
3214     if selfA._toStore("Innovation") or \
3215         selfA._toStore("SigmaObs2") or \
3216         selfA._toStore("MahalanobisConsistency") or \
3217         selfA._toStore("OMB"):
3218         d  = Y - HXb
3219     if selfA._toStore("Innovation"):
3220         selfA.StoredVariables["Innovation"].store( d )
3221     if selfA._toStore("BMA"):
3222         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3223     if selfA._toStore("OMA"):
3224         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3225     if selfA._toStore("OMB"):
3226         selfA.StoredVariables["OMB"].store( d )
3227     if selfA._toStore("SigmaObs2"):
3228         TraceR = R.trace(Y.size)
3229         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
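    # Mahalanobis consistency diagnostic: 2*Jmin/p (p = number of observations)
    # should be close to 1 when B and R are consistent with the innovations.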
3230     if selfA._toStore("MahalanobisConsistency"):
3231         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3232     if selfA._toStore("SimulationQuantiles"):
3233         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3234     if selfA._toStore("SimulatedObservationAtBackground"):
3235         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3236     if selfA._toStore("SimulatedObservationAtOptimum"):
3237         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3238     #
3239     return 0
3240
3241 # ==============================================================================
3242 def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3243     """
3244     4DVAR
3245     """
3246     #
3247     # Initializations
3248     # ---------------
3249     #
3250     # Operators
3251     Hm = HO["Direct"].appliedControledFormTo
3252     Mm = EM["Direct"].appliedControledFormTo
3253     #
3254     if CM is not None and "Tangent" in CM and U is not None:
3255         Cm = CM["Tangent"].asMatrix(Xb)
3256     else:
3257         Cm = None
3258     #
3259     def Un(_step):
3260         if U is not None:
3261             if hasattr(U,"store") and 1<=_step<len(U) :
3262                 _Un = numpy.asmatrix(numpy.ravel( U[_step] )).T
3263             elif hasattr(U,"store") and len(U)==1:
3264                 _Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3265             else:
3266                 _Un = numpy.asmatrix(numpy.ravel( U )).T
3267         else:
3268             _Un = None
3269         return _Un
3270     def CmUn(_xn,_un):
3271         if Cm is not None and _un is not None: # Warning: if Cm is also included in M, it is counted twice!
3272             _Cm   = Cm.reshape(_xn.size,_un.size) # ADAO & check shape
3273             _CmUn = _Cm * _un
3274         else:
3275             _CmUn = 0.
3276         return _CmUn
3277     #
3278     # Remark: the observations are used starting from time step number 1,
3279     # and are stored in Yo indexed accordingly. Step 0 is therefore not
3280     # used, since the first assimilation step begins with the observation
3281     # of step 1.
3282     #
3283     # Number of steps equal to the number of observation steps
3284     if hasattr(Y,"stepnumber"):
3285         duration = Y.stepnumber()
3286     else:
3287         duration = 2
3288     #
3289     # Precompute the inverses of B and R
3290     BI = B.getI()
3291     RI = R.getI()
3292     #
3293     # Starting point of the optimization
3294     Xini = selfA._parameters["InitializationPoint"]
3295     #
3296     # Definition of the cost function
3297     # -------------------------------
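    # The functional minimized below is the strong-constraint 4DVAR cost
    # (control terms Cm*Un omitted for readability):
    #   J(x) = 0.5*(x - Xb)^T B^-1 (x - Xb)
    #          + 0.5*sum_k (Y_k - H(M_{0->k}(x)))^T R^-1 (Y_k - H(M_{0->k}(x)))
    # where the sum runs over the observation steps k = 1..duration-1.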
3298     selfA.DirectCalculation = [None,] # Step 0 is not observed
3299     selfA.DirectInnovation  = [None,] # Step 0 is not observed
3300     def CostFunction(x):
3301         _X  = numpy.asmatrix(numpy.ravel( x )).T
3302         if selfA._parameters["StoreInternalVariables"] or \
3303             selfA._toStore("CurrentState") or \
3304             selfA._toStore("CurrentOptimum"):
3305             selfA.StoredVariables["CurrentState"].store( _X )
3306         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3307         selfA.DirectCalculation = [None,]
3308         selfA.DirectInnovation  = [None,]
3309         Jo  = 0.
3310         _Xn = _X
3311         for step in range(0,duration-1):
3312             if hasattr(Y,"store"):
3313                 _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
3314             else:
3315                 _Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
3316             _Un = Un(step)
3317             #
3318             # Evolution step
3319             if selfA._parameters["EstimationOf"] == "State":
3320                 _Xn = Mm( (_Xn, _Un) ) + CmUn(_Xn, _Un)
3321             elif selfA._parameters["EstimationOf"] == "Parameters":
3322                 pass
3323             #
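            # Component-wise projection of the propagated state onto the bounds
            # when the constraints are handled by "EstimateProjection"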
3324             if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3325                 __Bounds = ForceNumericBounds( selfA._parameters["Bounds"] )
3326                 _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(__Bounds)[:,0])),axis=1)
3327                 _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(__Bounds)[:,1])),axis=1)
3328             #
3329             # Observation misfit (innovation) step
3330             if selfA._parameters["EstimationOf"] == "State":
3331                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
3332             elif selfA._parameters["EstimationOf"] == "Parameters":
3333                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
3334             #
3335             # Storage of the state
3336             selfA.DirectCalculation.append( _Xn )
3337             selfA.DirectInnovation.append( _YmHMX )
3338             #
3339             # Addition to the observation part of the cost function
3340             Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
3341         J = Jb + Jo
3342         #
3343         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3344         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3345         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3346         selfA.StoredVariables["CostFunctionJ" ].store( J )
3347         if selfA._toStore("IndexOfOptimum") or \
3348             selfA._toStore("CurrentOptimum") or \
3349             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3350             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3351             selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3352             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3353         if selfA._toStore("IndexOfOptimum"):
3354             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3355         if selfA._toStore("CurrentOptimum"):
3356             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3357         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3358             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3359         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3360             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3361         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3362             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3363         return J
3364     #
3365     def GradientOfCostFunction(x):
3366         _X      = numpy.asmatrix(numpy.ravel( x )).T
3367         GradJb  = BI * (_X - Xb)
3368         GradJo  = 0.
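        # Backward (adjoint) loop over the stored trajectory: at each step,
        #   GradJo <- M^T * ( H^T * R^-1 * innovation + GradJo )
        # The accumulated GradJo enters the total gradient with a minus sign,
        # GradJ = GradJb - GradJo.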
3369         for step in range(duration-1,0,-1):
3370             # Retrieve the last stored evolution state
3371             _Xn = selfA.DirectCalculation.pop()
3372             # Retrieve the last stored innovation
3373             _YmHMX = selfA.DirectInnovation.pop()
3374             # Computation of the adjoints
3375             Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3376             Ha = Ha.reshape(_Xn.size,_YmHMX.size) # ADAO & check shape
3377             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3378             Ma = Ma.reshape(_Xn.size,_Xn.size) # ADAO & check shape
3379             # Gradient computation by the adjoint state
3380             GradJo = GradJo + Ha * RI * _YmHMX # For a linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
3381             GradJo = Ma * GradJo               # For a linear Ma, equivalent to: Ma( (_Xn, GradJo) )
3382         GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
3383         return GradJ
3384     #
3385     # Minimization of the cost function
3386     # ---------------------------------
3387     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3388     #
3389     if selfA._parameters["Minimizer"] == "LBFGSB":
3390         if "0.19" <= scipy.version.version <= "1.1.0":
3391             import lbfgsbhlt as optimiseur
3392         else:
3393             import scipy.optimize as optimiseur
3394         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3395             func        = CostFunction,
3396             x0          = Xini,
3397             fprime      = GradientOfCostFunction,
3398             args        = (),
3399             bounds      = selfA._parameters["Bounds"],
3400             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3401             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3402             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3403             iprint      = selfA._parameters["optiprint"],
3404             )
3405         nfeval = Informations['funcalls']
3406         rc     = Informations['warnflag']
3407     elif selfA._parameters["Minimizer"] == "TNC":
3408         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3409             func        = CostFunction,
3410             x0          = Xini,
3411             fprime      = GradientOfCostFunction,
3412             args        = (),
3413             bounds      = selfA._parameters["Bounds"],
3414             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3415             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3416             ftol        = selfA._parameters["CostDecrementTolerance"],
3417             messages    = selfA._parameters["optmessages"],
3418             )
3419     elif selfA._parameters["Minimizer"] == "CG":
3420         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3421             f           = CostFunction,
3422             x0          = Xini,
3423             fprime      = GradientOfCostFunction,
3424             args        = (),
3425             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3426             gtol        = selfA._parameters["GradientNormTolerance"],
3427             disp        = selfA._parameters["optdisp"],
3428             full_output = True,
3429             )
3430     elif selfA._parameters["Minimizer"] == "NCG":
3431         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3432             f           = CostFunction,
3433             x0          = Xini,
3434             fprime      = GradientOfCostFunction,
3435             args        = (),
3436             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3437             avextol     = selfA._parameters["CostDecrementTolerance"],
3438             disp        = selfA._parameters["optdisp"],
3439             full_output = True,
3440             )
3441     elif selfA._parameters["Minimizer"] == "BFGS":
3442         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3443             f           = CostFunction,
3444             x0          = Xini,
3445             fprime      = GradientOfCostFunction,
3446             args        = (),
3447             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3448             gtol        = selfA._parameters["GradientNormTolerance"],
3449             disp        = selfA._parameters["optdisp"],
3450             full_output = True,
3451             )
3452     else:
3453         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3454     #
3455     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3456     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3457     #
3458     # Correction to work around a TNC bug on the returned Minimum
3459     # ------------------------------------------------------------
3460     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3461         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3462     #
3463     # Retrieval of the analysis
3464     # -------------------------
3465     Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
3466     #
3467     selfA.StoredVariables["Analysis"].store( Xa )
3468     #
3469     # Additional computations and/or storage
3470     # ---------------------------------------
3471     if selfA._toStore("BMA"):
3472         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3473     #
3474     return 0
3475
3476 # ==============================================================================
3477 def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3478     """
3479     Standard Kalman Filter
3480     """
3481     if selfA._parameters["EstimationOf"] == "Parameters":
3482         selfA._parameters["StoreInternalVariables"] = True
3483     #
3484     # Operators
3485     # ---------
3486     Ht = HO["Tangent"].asMatrix(Xb)
3487     Ha = HO["Adjoint"].asMatrix(Xb)
3488     #
3489     if selfA._parameters["EstimationOf"] == "State":
3490         Mt = EM["Tangent"].asMatrix(Xb)
3491         Ma = EM["Adjoint"].asMatrix(Xb)
3492     #
3493     if CM is not None and "Tangent" in CM and U is not None:
3494         Cm = CM["Tangent"].asMatrix(Xb)
3495     else:
3496         Cm = None
3497     #
3498     # Observation duration and sizes
3499     if hasattr(Y,"stepnumber"):
3500         duration = Y.stepnumber()
3501         __p = numpy.cumprod(Y.shape())[-1]
3502     else:
3503         duration = 2
3504         __p = numpy.array(Y).size
3505     #
3506     # Precompute the inverses of B and R
3507     if selfA._parameters["StoreInternalVariables"] \
3508         or selfA._toStore("CostFunctionJ") \
3509         or selfA._toStore("CostFunctionJb") \
3510         or selfA._toStore("CostFunctionJo") \
3511         or selfA._toStore("CurrentOptimum") \
3512         or selfA._toStore("APosterioriCovariance"):
3513         BI = B.getI()
3514         RI = R.getI()
3515     #
3516     __n = Xb.size
3517     #
3518     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3519         Xn = Xb
3520         Pn = B
3521         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3522         selfA.StoredVariables["Analysis"].store( Xb )
3523         if selfA._toStore("APosterioriCovariance"):
3524             if hasattr(B,"asfullmatrix"):
3525                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
3526             else:
3527                 selfA.StoredVariables["APosterioriCovariance"].store( B )
3528         selfA._setInternalState("seed", numpy.random.get_state())
3529     elif selfA._parameters["nextStep"]:
3530         Xn = selfA._getInternalState("Xn")
3531         Pn = selfA._getInternalState("Pn")
3532     #
3533     if selfA._parameters["EstimationOf"] == "Parameters":
3534         XaMin            = Xn
3535         previousJMinimum = numpy.finfo(float).max
3536     #
3537     for step in range(duration-1):
3538         if hasattr(Y,"store"):
3539             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3540         else:
3541             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3542         #
3543         if U is not None:
3544             if hasattr(U,"store") and len(U)>1:
3545                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3546             elif hasattr(U,"store") and len(U)==1:
3547                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3548             else:
3549                 Un = numpy.asmatrix(numpy.ravel( U )).T
3550         else:
3551             Un = None
3552         #
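        # Linear Kalman filter step:
        #   forecast : Xf = M*Xn (+ Cm*Un),  Pf = M*Pn*M^T + Q
        #   analysis : K  = Pf*H^T*(R + H*Pf*H^T)^-1
        #              Xa = Xf + K*(Y - H*Xf),  Pa = Pf - K*H*Pf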
3553         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3554             Xn_predicted = Mt * Xn
3555             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3556                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3557                 Xn_predicted = Xn_predicted + Cm * Un
3558             Pn_predicted = Q + Mt * Pn * Ma
3559         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3560             # --- > By principle, M = Id, Q = 0
3561             Xn_predicted = Xn
3562             Pn_predicted = Pn
3563         #
3564         if selfA._parameters["EstimationOf"] == "State":
3565             HX_predicted = Ht * Xn_predicted
3566             _Innovation  = Ynpu - HX_predicted
3567         elif selfA._parameters["EstimationOf"] == "Parameters":
3568             HX_predicted = Ht * Xn_predicted
3569             _Innovation  = Ynpu - HX_predicted
3570             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3571                 _Innovation = _Innovation - Cm * Un
3572         #
3573         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
3574         Xn = Xn_predicted + Kn * _Innovation
3575         Pn = Pn_predicted - Kn * Ht * Pn_predicted
3576         #
3577         Xa = Xn # Pointers
3578         #--------------------------
3579         selfA._setInternalState("Xn", Xn)
3580         selfA._setInternalState("Pn", Pn)
3581         #--------------------------
3582         #
3583         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3584         # ---> with analysis
3585         selfA.StoredVariables["Analysis"].store( Xa )
3586         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3587             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
3588         if selfA._toStore("InnovationAtCurrentAnalysis"):
3589             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3590         # ---> with current state
3591         if selfA._parameters["StoreInternalVariables"] \
3592             or selfA._toStore("CurrentState"):
3593             selfA.StoredVariables["CurrentState"].store( Xn )
3594         if selfA._toStore("ForecastState"):
3595             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
3596         if selfA._toStore("ForecastCovariance"):
3597             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
3598         if selfA._toStore("BMA"):
3599             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
3600         if selfA._toStore("InnovationAtCurrentState"):
3601             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3602         if selfA._toStore("SimulatedObservationAtCurrentState") \
3603             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3604             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3605         # ---> others
3606         if selfA._parameters["StoreInternalVariables"] \
3607             or selfA._toStore("CostFunctionJ") \
3608             or selfA._toStore("CostFunctionJb") \
3609             or selfA._toStore("CostFunctionJo") \
3610             or selfA._toStore("CurrentOptimum") \
3611             or selfA._toStore("APosterioriCovariance"):
3612             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3613             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3614             J   = Jb + Jo
3615             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3616             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3617             selfA.StoredVariables["CostFunctionJ" ].store( J )
3618             #
3619             if selfA._toStore("IndexOfOptimum") \
3620                 or selfA._toStore("CurrentOptimum") \
3621                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3622                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3623                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3624                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3625                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3626             if selfA._toStore("IndexOfOptimum"):
3627                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3628             if selfA._toStore("CurrentOptimum"):
3629                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3630             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3631                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3632             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3633                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3634             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3635                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3636             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3637                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3638         if selfA._toStore("APosterioriCovariance"):
3639             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3640         if selfA._parameters["EstimationOf"] == "Parameters" \
3641             and J < previousJMinimum:
3642             previousJMinimum    = J
3643             XaMin               = Xa
3644             if selfA._toStore("APosterioriCovariance"):
3645                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3646     #
3647     # Additional final storage of the optimum in parameter estimation
3648     # ----------------------------------------------------------------------
3649     if selfA._parameters["EstimationOf"] == "Parameters":
3650         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3651         selfA.StoredVariables["Analysis"].store( XaMin )
3652         if selfA._toStore("APosterioriCovariance"):
3653             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3654         if selfA._toStore("BMA"):
3655             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3656     #
3657     return 0
3658
3659 # ==============================================================================
3660 def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3661     """
3662     Unscented Kalman Filter
3663     """
3664     if selfA._parameters["EstimationOf"] == "Parameters":
3665         selfA._parameters["StoreInternalVariables"] = True
3666     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
3667     #
3668     L     = Xb.size
3669     Alpha = selfA._parameters["Alpha"]
3670     Beta  = selfA._parameters["Beta"]
3671     if selfA._parameters["Kappa"] == 0:
3672         if selfA._parameters["EstimationOf"] == "State":
3673             Kappa = 0
3674         elif selfA._parameters["EstimationOf"] == "Parameters":
3675             Kappa = 3 - L
3676     else:
3677         Kappa = selfA._parameters["Kappa"]
3678     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3679     Gamma  = math.sqrt( L + Lambda )
3680     #
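    # Weights of the scaled unscented transform:
    #   Wm[0] = Lambda/(L+Lambda), Wc[0] = Lambda/(L+Lambda) + (1 - Alpha^2 + Beta)
    #   Wm[i] = Wc[i] = 1/(2*(L+Lambda)) for i = 1..2L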
3681     Ww = []
3682     Ww.append( 0. )
3683     for i in range(2*L):
3684         Ww.append( 1. / (2.*(L + Lambda)) )
3685     #
3686     Wm = numpy.array( Ww )
3687     Wm[0] = Lambda / (L + Lambda)
3688     Wc = numpy.array( Ww )
3689     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
3690     #
3691     # Operators
3692     Hm = HO["Direct"].appliedControledFormTo
3693     #
3694     if selfA._parameters["EstimationOf"] == "State":
3695         Mm = EM["Direct"].appliedControledFormTo
3696     #
3697     if CM is not None and "Tangent" in CM and U is not None:
3698         Cm = CM["Tangent"].asMatrix(Xb)
3699     else:
3700         Cm = None
3701     #
3702     # Observation duration and sizes
3703     if hasattr(Y,"stepnumber"):
3704         duration = Y.stepnumber()
3705         __p = numpy.cumprod(Y.shape())[-1]
3706     else:
3707         duration = 2
3708         __p = numpy.array(Y).size
3709     #
3710     # Precompute the inverses of B and R
3711     if selfA._parameters["StoreInternalVariables"] \
3712         or selfA._toStore("CostFunctionJ") \
3713         or selfA._toStore("CostFunctionJb") \
3714         or selfA._toStore("CostFunctionJo") \
3715         or selfA._toStore("CurrentOptimum") \
3716         or selfA._toStore("APosterioriCovariance"):
3717         BI = B.getI()
3718         RI = R.getI()
3719     #
3720     __n = Xb.size
3721     #
3722     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3723         Xn = Xb
3724         if hasattr(B,"asfullmatrix"):
3725             Pn = B.asfullmatrix(__n)
3726         else:
3727             Pn = B
3728         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3729         selfA.StoredVariables["Analysis"].store( Xb )
3730         if selfA._toStore("APosterioriCovariance"):
3731             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3732     elif selfA._parameters["nextStep"]:
3733         Xn = selfA._getInternalState("Xn")
3734         Pn = selfA._getInternalState("Pn")
3735     #
3736     if selfA._parameters["EstimationOf"] == "Parameters":
3737         XaMin            = Xn
3738         previousJMinimum = numpy.finfo(float).max
3739     #
3740     for step in range(duration-1):
3741         if hasattr(Y,"store"):
3742             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3743         else:
3744             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3745         #
3746         if U is not None:
3747             if hasattr(U,"store") and len(U)>1:
3748                 Un = numpy.ravel( U[step] ).reshape((-1,1))
3749             elif hasattr(U,"store") and len(U)==1:
3750                 Un = numpy.ravel( U[0] ).reshape((-1,1))
3751             else:
3752                 Un = numpy.ravel( U ).reshape((-1,1))
3753         else:
3754             Un = None
3755         #
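        # Build the 2*L+1 sigma points around the current state, using the
        # columns of the Cholesky factor of Pn:
        #   X_0 = Xn, X_i = Xn + Gamma*sqrt(Pn)_i, X_{L+i} = Xn - Gamma*sqrt(Pn)_i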
3756         Pndemi = numpy.linalg.cholesky(Pn)
3757         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
3758         nbSpts = 2*Xn.size+1
3759         #
3760         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3761             for point in range(nbSpts):
3762                 Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3763                 Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3764         #
3765         XEtnnp = []
3766         for point in range(nbSpts):
3767             if selfA._parameters["EstimationOf"] == "State":
3768                 XEtnnpi = numpy.asmatrix(numpy.ravel( Mm( (Xnp[:,point], Un) ) )).T
3769                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3770                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
3771                     XEtnnpi = XEtnnpi + Cm * Un
3772                 if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3773                     XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3774                     XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3775             elif selfA._parameters["EstimationOf"] == "Parameters":
3776                 # --- > By principle, M = Id, Q = 0
3777                 XEtnnpi = Xnp[:,point]
3778             XEtnnp.append( XEtnnpi )
3779         XEtnnp = numpy.hstack( XEtnnp )
3780         #
3781         Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
3782         #
3783         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3784             Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3785             Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3786         #
3787         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
3788         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
3789         for point in range(nbSpts):
3790             Pnm += Wc[point] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
3791         #
3792         if selfA._parameters["EstimationOf"] == "Parameters" and selfA._parameters["Bounds"] is not None:
3793             Pnmdemi = selfA._parameters["Reconditioner"] * numpy.linalg.cholesky(Pnm)
3794         else:
3795             Pnmdemi = numpy.linalg.cholesky(Pnm)
3796         #
3797         Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
3798         #
3799         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3800             for point in range(nbSpts):
3801                 Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3802                 Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3803         #
3804         Ynnp = []
3805         for point in range(nbSpts):
3806             if selfA._parameters["EstimationOf"] == "State":
3807                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], None) ) )).T
3808             elif selfA._parameters["EstimationOf"] == "Parameters":
3809                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], Un) ) )).T
3810             Ynnp.append( Ynnpi )
3811         Ynnp = numpy.hstack( Ynnp )
3812         #
3813         Yncm = numpy.matrix( Ynnp.getA()*numpy.array(Wm) ).sum(axis=1)
3814         #
3815         Pyyn = R
3816         Pxyn = 0.
3817         for point in range(nbSpts):
3818             Pyyn += Wc[point] * (Ynnp[:,point]-Yncm) * (Ynnp[:,point]-Yncm).T
3819             Pxyn += Wc[point] * (Xnnp[:,point]-Xncm) * (Ynnp[:,point]-Yncm).T
3820         #
3821         _Innovation  = Ynpu - Yncm
3822         if selfA._parameters["EstimationOf"] == "Parameters":
3823             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3824                 _Innovation = _Innovation - Cm * Un
3825         #
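        # Unscented correction: gain K = Pxy * Pyy^-1, then
        #   Xa = Xf + K*(Y - Yf)  and  Pa = Pf - K*Pyy*K^T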
3826         Kn = Pxyn * Pyyn.I
3827         Xn = Xncm + Kn * _Innovation
3828         Pn = Pnm - Kn * Pyyn * Kn.T
3829         #
3830         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3831             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3832             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3833         #
3834         Xa = Xn # Pointers
3835         #--------------------------
3836         selfA._setInternalState("Xn", Xn)
3837         selfA._setInternalState("Pn", Pn)
3838         #--------------------------
3839         #
3840         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3841         # ---> with analysis
3842         selfA.StoredVariables["Analysis"].store( Xa )
3843         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3844             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
3845         if selfA._toStore("InnovationAtCurrentAnalysis"):
3846             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3847         # ---> with current state
3848         if selfA._parameters["StoreInternalVariables"] \
3849             or selfA._toStore("CurrentState"):
3850             selfA.StoredVariables["CurrentState"].store( Xn )
3851         if selfA._toStore("ForecastState"):
3852             selfA.StoredVariables["ForecastState"].store( Xncm )
3853         if selfA._toStore("ForecastCovariance"):
3854             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
3855         if selfA._toStore("BMA"):
3856             selfA.StoredVariables["BMA"].store( Xncm - Xa )
3857         if selfA._toStore("InnovationAtCurrentState"):
3858             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3859         if selfA._toStore("SimulatedObservationAtCurrentState") \
3860             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3861             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
3862         # ---> others
3863         if selfA._parameters["StoreInternalVariables"] \
3864             or selfA._toStore("CostFunctionJ") \
3865             or selfA._toStore("CostFunctionJb") \
3866             or selfA._toStore("CostFunctionJo") \
3867             or selfA._toStore("CurrentOptimum") \
3868             or selfA._toStore("APosterioriCovariance"):
3869             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3870             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3871             J   = Jb + Jo
3872             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3873             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3874             selfA.StoredVariables["CostFunctionJ" ].store( J )
3875             #
3876             if selfA._toStore("IndexOfOptimum") \
3877                 or selfA._toStore("CurrentOptimum") \
3878                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3879                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3880                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3881                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3882                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3883             if selfA._toStore("IndexOfOptimum"):
3884                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3885             if selfA._toStore("CurrentOptimum"):
3886                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3887             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3888                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3889             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3890                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3891             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3892                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3893             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3894                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3895         if selfA._toStore("APosterioriCovariance"):
3896             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3897         if selfA._parameters["EstimationOf"] == "Parameters" \
3898             and J < previousJMinimum:
3899             previousJMinimum    = J
3900             XaMin               = Xa
3901             if selfA._toStore("APosterioriCovariance"):
3902                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3903     #
3904     # Additional final storage of the optimum in parameter estimation
3905     # ----------------------------------------------------------------------
3906     if selfA._parameters["EstimationOf"] == "Parameters":
3907         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3908         selfA.StoredVariables["Analysis"].store( XaMin )
3909         if selfA._toStore("APosterioriCovariance"):
3910             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3911         if selfA._toStore("BMA"):
3912             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3913     #
3914     return 0
3915
3916 # ==============================================================================
3917 def van3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3918     """
3919     3DVAR variational analysis with no inversion of B
3920     """
3921     #
3922     # Initializations
3923     # ---------------
3924     #
3925     # Operators
3926     Hm = HO["Direct"].appliedTo
3927     Ha = HO["Adjoint"].appliedInXTo
3928     #
3929     # Precompute the transpose of B and the inverse of R
3930     BT = B.getT()
3931     RI = R.getI()
3932     #
3933     # Starting point of the optimization
3934     Xini = numpy.zeros(Xb.shape)
3935     #
3936     # Definition of the cost function
3937     # -------------------------------
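    # Change of control variable x = Xb + B*v, so that the background term
    # becomes Jb(v) = 0.5*v^T B v and only R has to be inverted:
    #   J(v) = 0.5*v^T B v + 0.5*(Y - H(Xb + B*v))^T R^-1 (Y - H(Xb + B*v))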
3938     def CostFunction(v):
3939         _V = numpy.asmatrix(numpy.ravel( v )).T
3940         _X = Xb + B * _V
3941         if selfA._parameters["StoreInternalVariables"] or \
3942             selfA._toStore("CurrentState") or \
3943             selfA._toStore("CurrentOptimum"):
3944             selfA.StoredVariables["CurrentState"].store( _X )
3945         _HX = Hm( _X )
3946         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
3947         _Innovation = Y - _HX
3948         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3949             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3950             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3951         if selfA._toStore("InnovationAtCurrentState"):
3952             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3953         #
3954         Jb  = float( 0.5 * _V.T * BT * _V )
3955         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3956         J   = Jb + Jo
3957         #
3958         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3959         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3960         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3961         selfA.StoredVariables["CostFunctionJ" ].store( J )
3962         if selfA._toStore("IndexOfOptimum") or \
3963             selfA._toStore("CurrentOptimum") or \
3964             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3965             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3966             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3967             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3968             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3969         if selfA._toStore("IndexOfOptimum"):
3970             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3971         if selfA._toStore("CurrentOptimum"):
3972             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3973         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3974             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3975         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3976             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3977         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3978             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3979         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3980             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3981         return J
3982     #
3983     def GradientOfCostFunction(v):
3984         _V = numpy.asmatrix(numpy.ravel( v )).T
3985         _X = Xb + B * _V
3986         _HX     = Hm( _X )
3987         _HX     = numpy.asmatrix(numpy.ravel( _HX )).T
3988         GradJb  = BT * _V
3989         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3990         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3991         return GradJ
3992     #
3993     # Minimization of the cost function
3994     # ---------------------------------
3995     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3996     #
3997     if selfA._parameters["Minimizer"] == "LBFGSB":
3998         if "0.19" <= scipy.version.version <= "1.1.0":
3999             import lbfgsbhlt as optimiseur
4000         else:
4001             import scipy.optimize as optimiseur
4002         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
4003             func        = CostFunction,
4004             x0          = Xini,
4005             fprime      = GradientOfCostFunction,
4006             args        = (),
4007             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4008             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
4009             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
4010             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4011             iprint      = selfA._parameters["optiprint"],
4012             )
4013         nfeval = Informations['funcalls']
4014         rc     = Informations['warnflag']
4015     elif selfA._parameters["Minimizer"] == "TNC":
4016         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
4017             func        = CostFunction,
4018             x0          = Xini,
4019             fprime      = GradientOfCostFunction,
4020             args        = (),
4021             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4022             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
4023             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4024             ftol        = selfA._parameters["CostDecrementTolerance"],
4025             messages    = selfA._parameters["optmessages"],
4026             )
4027     elif selfA._parameters["Minimizer"] == "CG":
4028         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
4029             f           = CostFunction,
4030             x0          = Xini,
4031             fprime      = GradientOfCostFunction,
4032             args        = (),
4033             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4034             gtol        = selfA._parameters["GradientNormTolerance"],
4035             disp        = selfA._parameters["optdisp"],
4036             full_output = True,
4037             )
4038     elif selfA._parameters["Minimizer"] == "NCG":
4039         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
4040             f           = CostFunction,
4041             x0          = Xini,
4042             fprime      = GradientOfCostFunction,
4043             args        = (),
4044             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4045             avextol     = selfA._parameters["CostDecrementTolerance"],
4046             disp        = selfA._parameters["optdisp"],
4047             full_output = True,
4048             )
4049     elif selfA._parameters["Minimizer"] == "BFGS":
4050         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
4051             f           = CostFunction,
4052             x0          = Xini,
4053             fprime      = GradientOfCostFunction,
4054             args        = (),
4055             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4056             gtol        = selfA._parameters["GradientNormTolerance"],
4057             disp        = selfA._parameters["optdisp"],
4058             full_output = True,
4059             )
4060     else:
4061         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
4062     #
4063     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4064     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
4065     #
4066     # Correction to work around a TNC bug on the returned Minimum
4067     # ------------------------------------------------------------
4068     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
4069         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
4070         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
4071     else:
4072         Minimum = Xb + B * numpy.asmatrix(numpy.ravel( Minimum )).T
4073     #
4074     # Retrieval of the analysis
4075     # -------------------------
4076     Xa = Minimum
4077     #
4078     selfA.StoredVariables["Analysis"].store( Xa )
4079     #
4080     if selfA._toStore("OMA") or \
4081         selfA._toStore("SigmaObs2") or \
4082         selfA._toStore("SimulationQuantiles") or \
4083         selfA._toStore("SimulatedObservationAtOptimum"):
4084         if selfA._toStore("SimulatedObservationAtCurrentState"):
4085             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
4086         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4087             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
4088         else:
4089             HXa = Hm( Xa )
4090     #
4091     # Computation of the analysis covariance
4092     # ---------------------------------------
4093     if selfA._toStore("APosterioriCovariance") or \
4094         selfA._toStore("SimulationQuantiles") or \
4095         selfA._toStore("JacobianMatrixAtOptimum") or \
4096         selfA._toStore("KalmanGainAtOptimum"):
4097         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
4098         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
4099     if selfA._toStore("APosterioriCovariance") or \
4100         selfA._toStore("SimulationQuantiles") or \
4101         selfA._toStore("KalmanGainAtOptimum"):
4102         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
4103         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
4104     if selfA._toStore("APosterioriCovariance") or \
4105         selfA._toStore("SimulationQuantiles"):
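        # The a posteriori covariance A is approximated by the inverse of the
        # Hessian (B^-1 + H^T R^-1 H), assembled column by column from the
        # canonical basis vectors.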
4106         BI = B.getI()
4107         HessienneI = []
4108         nb = Xa.size
4109         for i in range(nb):
4110             _ee    = numpy.matrix(numpy.zeros(nb)).T
4111             _ee[i] = 1.
4112             _HtEE  = numpy.dot(HtM,_ee)
4113             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
4114             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
4115         HessienneI = numpy.matrix( HessienneI )
4116         A = HessienneI.I
4117         if min(A.shape) != max(A.shape):
4118             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
4119         if (numpy.diag(A) < 0).any():
4120             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
4121         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
4122             try:
4123                 L = numpy.linalg.cholesky( A )
4124             except numpy.linalg.LinAlgError:
4125                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
4126     if selfA._toStore("APosterioriCovariance"):
4127         selfA.StoredVariables["APosterioriCovariance"].store( A )
4128     if selfA._toStore("JacobianMatrixAtOptimum"):
4129         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
4130     if selfA._toStore("KalmanGainAtOptimum"):
4131         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
4132         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
4133         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
4134     #
4135     # Additional computations and/or storage
4136     # ---------------------------------------
4137     if selfA._toStore("Innovation") or \
4138         selfA._toStore("SigmaObs2") or \
4139         selfA._toStore("MahalanobisConsistency") or \
4140         selfA._toStore("OMB"):
4141         d  = Y - HXb
4142     if selfA._toStore("Innovation"):
4143         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
4144     if selfA._toStore("BMA"):
4145         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
4146     if selfA._toStore("OMA"):
4147         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
4148     if selfA._toStore("OMB"):
4149         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
4150     if selfA._toStore("SigmaObs2"):
4151         TraceR = R.trace(Y.size)
4152         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
4153     if selfA._toStore("MahalanobisConsistency"):
4154         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
4155     if selfA._toStore("SimulationQuantiles"):
4156         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
4157     if selfA._toStore("SimulatedObservationAtBackground"):
4158         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
4159     if selfA._toStore("SimulatedObservationAtOptimum"):
4160         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
4161     #
4162     return 0
4163
4164 # ==============================================================================
4165 if __name__ == "__main__":
4166     print('\n AUTODIAGNOSTIC\n')