Minor documentation review corrections (6)
[modules/adao.git] / src / daComposant / daCore / NumericObjects.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 __doc__ = """
24     Defines the generic numerical objects.
25 """
26 __author__ = "Jean-Philippe ARGAUD"
27
28 import os, time, copy, types, sys, logging
29 import math, numpy, scipy, scipy.optimize, scipy.version
30 from daCore.BasicObjects import Operator
31 from daCore.PlatformInfo import PlatformInfo
32 mpr = PlatformInfo().MachinePrecision()
33 mfp = PlatformInfo().MaximumPrecision()
34 # logging.getLogger().setLevel(logging.DEBUG)
35
36 # ==============================================================================
37 def ExecuteFunction( triplet ):
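    "Worker routine used by multiprocessing.Pool.map: re-imports the user function described by funcrepr, applies it to X (with the optional keyword arguments xArgs), and returns the flattened result"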
38     assert len(triplet) == 3, "Incorrect number of arguments"
39     X, xArgs, funcrepr = triplet
40     __X = numpy.asmatrix(numpy.ravel( X )).T
41     __sys_path_tmp = sys.path ; sys.path.insert(0,funcrepr["__userFunction__path"])
42     __module = __import__(funcrepr["__userFunction__modl"], globals(), locals(), [])
43     __fonction = getattr(__module,funcrepr["__userFunction__name"])
44     sys.path = __sys_path_tmp ; del __sys_path_tmp
45     if isinstance(xArgs, dict):
46         __HX  = __fonction( __X, **xArgs )
47     else:
48         __HX  = __fonction( __X )
49     return numpy.ravel( __HX )
50
51 # ==============================================================================
52 class FDApproximation(object):
53     """
54     This class serves as an interface to define approximated operators. By
55     creating an object and providing a function "Function", one gets an object
56     that has the 3 methods "DirectOperator", "TangentOperator" and
57     "AdjointOperator". The finite-difference (FD) approximation is controlled
58     either by the multiplicative increment "increment", 1% by default, or by
59     the fixed increment "dX" which is then multiplied by "increment" (hence in
60     %), and centered FD are used if the boolean "centeredDF" is true.
61     """
62     def __init__(self,
63             name                  = "FDApproximation",
64             Function              = None,
65             centeredDF            = False,
66             increment             = 0.01,
67             dX                    = None,
68             extraArguments        = None,
69             avoidingRedundancy    = True,
70             toleranceInRedundancy = 1.e-18,
71             lenghtOfRedundancy    = -1,
72             mpEnabled             = False,
73             mpWorkers             = None,
74             mfEnabled             = False,
75             ):
76         self.__name = str(name)
77         self.__extraArgs = extraArguments
78         if mpEnabled:
79             try:
80                 import multiprocessing
81                 self.__mpEnabled = True
82             except ImportError:
83                 self.__mpEnabled = False
84         else:
85             self.__mpEnabled = False
86         self.__mpWorkers = mpWorkers
87         if self.__mpWorkers is not None and self.__mpWorkers < 1:
88             self.__mpWorkers = None
89         logging.debug("FDA Calculs en multiprocessing : %s (nombre de processus : %s)"%(self.__mpEnabled,self.__mpWorkers))
90         #
91         if mfEnabled:
92             self.__mfEnabled = True
93         else:
94             self.__mfEnabled = False
95         logging.debug("FDA Calculs en multifonctions : %s"%(self.__mfEnabled,))
96         #
97         if avoidingRedundancy:
98             self.__avoidRC = True
99             self.__tolerBP = float(toleranceInRedundancy)
100             self.__lenghtRJ = int(lenghtOfRedundancy)
101             self.__listJPCP = [] # Jacobian Previous Calculated Points
102             self.__listJPCI = [] # Jacobian Previous Calculated Increment
103             self.__listJPCR = [] # Jacobian Previous Calculated Results
104             self.__listJPPN = [] # Jacobian Previous Calculated Point Norms
105             self.__listJPIN = [] # Jacobian Previous Calculated Increment Norms
106         else:
107             self.__avoidRC = False
108         #
109         if self.__mpEnabled:
110             if isinstance(Function,types.FunctionType):
111                 logging.debug("FDA Calculs en multiprocessing : FunctionType")
112                 self.__userFunction__name = Function.__name__
113                 try:
114                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
115                 except:
116                     mod = os.path.abspath(Function.__globals__['__file__'])
117                 if not os.path.isfile(mod):
118                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
119                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
120                 self.__userFunction__path = os.path.dirname(mod)
121                 del mod
122                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
123                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
124             elif isinstance(Function,types.MethodType):
125                 logging.debug("FDA Calculs en multiprocessing : MethodType")
126                 self.__userFunction__name = Function.__name__
127                 try:
128                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
129                 except:
130                     mod = os.path.abspath(Function.__func__.__globals__['__file__'])
131                 if not os.path.isfile(mod):
132                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
133                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
134                 self.__userFunction__path = os.path.dirname(mod)
135                 del mod
136                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
137                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
138             else:
139                 raise TypeError("User defined function or method has to be provided for finite differences approximation.")
140         else:
141             self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
142             self.__userFunction = self.__userOperator.appliedTo
143         #
144         self.__centeredDF = bool(centeredDF)
145         if abs(float(increment)) > 1.e-15:
146             self.__increment  = float(increment)
147         else:
148             self.__increment  = 0.01
149         if dX is None:
150             self.__dX     = None
151         else:
152             self.__dX     = numpy.asmatrix(numpy.ravel( dX )).T
153         logging.debug("FDA Reduction des doublons de calcul : %s"%self.__avoidRC)
154         if self.__avoidRC:
155             logging.debug("FDA Tolerance de determination des doublons : %.2e"%self.__tolerBP)
156
157     # ---------------------------------------------------------
158     def __doublon__(self, e, l, n, v=None):
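        "Searches the list l backwards for an entry whose distance to e is below the tolerance scaled by the corresponding norm in n, and returns (found, index)"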
159         __ac, __iac = False, -1
160         for i in range(len(l)-1,-1,-1):
161             if numpy.linalg.norm(e - l[i]) < self.__tolerBP * n[i]:
162                 __ac, __iac = True, i
163                 if v is not None: logging.debug("FDA Cas%s déja calculé, récupération du doublon %i"%(v,__iac))
164                 break
165         return __ac, __iac
166
167     # ---------------------------------------------------------
168     def DirectOperator(self, X, **extraArgs ):
169         """
170         Direct computation using the provided function.
171
172         NB: the extraArgs are there to ensure call compatibility, but they
173         must not be passed here to the user function.
174         """
175         logging.debug("FDA Calcul DirectOperator (explicite)")
176         if self.__mfEnabled:
177             _HX = self.__userFunction( X, argsAsSerie = True )
178         else:
179             _X = numpy.asmatrix(numpy.ravel( X )).T
180             _HX = numpy.ravel(self.__userFunction( _X ))
181         #
182         return _HX
183
184     # ---------------------------------------------------------
185     def TangentMatrix(self, X ):
186         """
187         Computation of the tangent operator as the Jacobian by finite
188         differences, i.e. the gradient of H at X. Directional finite
189         differences around the point X are used. X is a numpy.matrix.
190
191         Centered finite differences (2nd order approximation):
192         1/ For each component i of X, the perturbation dX[i] is added to and
193            subtracted from the component X[i], to build X_plus_dXi and
194            X_moins_dXi, and the responses HX_plus_dXi = H( X_plus_dXi ) and
195            HX_moins_dXi = H( X_moins_dXi ) are computed
196         2/ The differences (HX_plus_dXi-HX_moins_dXi) are computed and
197            divided by the step 2*dXi
198         3/ Each result, component by component, becomes a column of the Jacobian
199
200         Non-centered finite differences (1st order approximation):
201         1/ For each component i of X, the perturbation dX[i] is added to the
202            component X[i] to build X_plus_dXi, and the response
203            HX_plus_dXi = H( X_plus_dXi ) is computed
204         2/ The central value HX = H(X) is computed
205         3/ The differences (HX_plus_dXi-HX) are computed and divided by
206            the step dXi
207         4/ Each result, component by component, becomes a column of the Jacobian
208
209         """
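        # In compact form, with e_i the i-th canonical basis vector:
        #   centered     : J[:,i] ~ ( H(X + dX[i]*e_i) - H(X - dX[i]*e_i) ) / (2*dX[i])
        #   non-centered : J[:,i] ~ ( H(X + dX[i]*e_i) - H(X) ) / dX[i]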
210         logging.debug("FDA Début du calcul de la Jacobienne")
211         logging.debug("FDA   Incrément de............: %s*X"%float(self.__increment))
212         logging.debug("FDA   Approximation centrée...: %s"%(self.__centeredDF))
213         #
214         if X is None or len(X)==0:
215             raise ValueError("Nominal point X for approximate derivatives can not be None or void (given X: %s)."%(str(X),))
216         #
217         _X = numpy.asmatrix(numpy.ravel( X )).T
218         #
219         if self.__dX is None:
220             _dX  = self.__increment * _X
221         else:
222             _dX = numpy.asmatrix(numpy.ravel( self.__dX )).T
223         #
224         if (_dX == 0.).any():
225             moyenne = _dX.mean()
226             if moyenne == 0.:
227                 _dX = numpy.where( _dX == 0., float(self.__increment), _dX )
228             else:
229                 _dX = numpy.where( _dX == 0., moyenne, _dX )
230         #
231         __alreadyCalculated  = False
232         if self.__avoidRC:
233             __bidon, __alreadyCalculatedP = self.__doublon__(_X,  self.__listJPCP, self.__listJPPN, None)
234             __bidon, __alreadyCalculatedI = self.__doublon__(_dX, self.__listJPCI, self.__listJPIN, None)
235             if __alreadyCalculatedP == __alreadyCalculatedI > -1:
236                 __alreadyCalculated, __i = True, __alreadyCalculatedP
237                 logging.debug("FDA Cas J déja calculé, récupération du doublon %i"%__i)
238         #
239         if __alreadyCalculated:
240             logging.debug("FDA   Calcul Jacobienne (par récupération du doublon %i)"%__i)
241             _Jacobienne = self.__listJPCR[__i]
242         else:
243             logging.debug("FDA   Calcul Jacobienne (explicite)")
244             if self.__centeredDF:
245                 #
246                 if self.__mpEnabled and not self.__mfEnabled:
247                     funcrepr = {
248                         "__userFunction__path" : self.__userFunction__path,
249                         "__userFunction__modl" : self.__userFunction__modl,
250                         "__userFunction__name" : self.__userFunction__name,
251                     }
252                     _jobs = []
253                     for i in range( len(_dX) ):
254                         _dXi            = _dX[i]
255                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
256                         _X_plus_dXi[i]  = _X[i] + _dXi
257                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
258                         _X_moins_dXi[i] = _X[i] - _dXi
259                         #
260                         _jobs.append( (_X_plus_dXi,  self.__extraArgs, funcrepr) )
261                         _jobs.append( (_X_moins_dXi, self.__extraArgs, funcrepr) )
262                     #
263                     import multiprocessing
264                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
265                     _HX_plusmoins_dX = self.__pool.map( ExecuteFunction, _jobs )
266                     self.__pool.close()
267                     self.__pool.join()
268                     #
269                     _Jacobienne  = []
270                     for i in range( len(_dX) ):
271                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
272                     #
273                 elif self.__mfEnabled:
274                     _xserie = []
275                     for i in range( len(_dX) ):
276                         _dXi            = _dX[i]
277                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
278                         _X_plus_dXi[i]  = _X[i] + _dXi
279                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
280                         _X_moins_dXi[i] = _X[i] - _dXi
281                         #
282                         _xserie.append( _X_plus_dXi )
283                         _xserie.append( _X_moins_dXi )
284                     #
285                     _HX_plusmoins_dX = self.DirectOperator( _xserie )
286                     #
287                     _Jacobienne  = []
288                     for i in range( len(_dX) ):
289                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
290                     #
291                 else:
292                     _Jacobienne  = []
293                     for i in range( _dX.size ):
294                         _dXi            = _dX[i]
295                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
296                         _X_plus_dXi[i]  = _X[i] + _dXi
297                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
298                         _X_moins_dXi[i] = _X[i] - _dXi
299                         #
300                         _HX_plus_dXi    = self.DirectOperator( _X_plus_dXi )
301                         _HX_moins_dXi   = self.DirectOperator( _X_moins_dXi )
302                         #
303                         _Jacobienne.append( numpy.ravel( _HX_plus_dXi - _HX_moins_dXi ) / (2.*_dXi) )
304                 #
305             else:
306                 #
307                 if self.__mpEnabled and not self.__mfEnabled:
308                     funcrepr = {
309                         "__userFunction__path" : self.__userFunction__path,
310                         "__userFunction__modl" : self.__userFunction__modl,
311                         "__userFunction__name" : self.__userFunction__name,
312                     }
313                     _jobs = []
314                     _jobs.append( (_X.A1, self.__extraArgs, funcrepr) )
315                     for i in range( len(_dX) ):
316                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
317                         _X_plus_dXi[i] = _X[i] + _dX[i]
318                         #
319                         _jobs.append( (_X_plus_dXi, self.__extraArgs, funcrepr) )
320                     #
321                     import multiprocessing
322                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
323                     _HX_plus_dX = self.__pool.map( ExecuteFunction, _jobs )
324                     self.__pool.close()
325                     self.__pool.join()
326                     #
327                     _HX = _HX_plus_dX.pop(0)
328                     #
329                     _Jacobienne = []
330                     for i in range( len(_dX) ):
331                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
332                     #
333                 elif self.__mfEnabled:
334                     _xserie = []
335                     _xserie.append( _X.A1 )
336                     for i in range( len(_dX) ):
337                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
338                         _X_plus_dXi[i] = _X[i] + _dX[i]
339                         #
340                         _xserie.append( _X_plus_dXi )
341                     #
342                     _HX_plus_dX = self.DirectOperator( _xserie )
343                     #
344                     _HX = _HX_plus_dX.pop(0)
345                     #
346                     _Jacobienne = []
347                     for i in range( len(_dX) ):
348                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
349                     #
350                 else:
351                     _Jacobienne  = []
352                     _HX = self.DirectOperator( _X )
353                     for i in range( _dX.size ):
354                         _dXi            = _dX[i]
355                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
356                         _X_plus_dXi[i]  = _X[i] + _dXi
357                         #
358                         _HX_plus_dXi = self.DirectOperator( _X_plus_dXi )
359                         #
360                         _Jacobienne.append( numpy.ravel(( _HX_plus_dXi - _HX ) / _dXi) )
361                 #
362             #
363             _Jacobienne = numpy.asmatrix( numpy.vstack( _Jacobienne ) ).T
364             if self.__avoidRC:
365                 if self.__lenghtRJ < 0: self.__lenghtRJ = 2 * _X.size
366                 while len(self.__listJPCP) > self.__lenghtRJ:
367                     self.__listJPCP.pop(0)
368                     self.__listJPCI.pop(0)
369                     self.__listJPCR.pop(0)
370                     self.__listJPPN.pop(0)
371                     self.__listJPIN.pop(0)
372                 self.__listJPCP.append( copy.copy(_X) )
373                 self.__listJPCI.append( copy.copy(_dX) )
374                 self.__listJPCR.append( copy.copy(_Jacobienne) )
375                 self.__listJPPN.append( numpy.linalg.norm(_X) )
376                 self.__listJPIN.append( numpy.linalg.norm(_Jacobienne) )
377         #
378         logging.debug("FDA Fin du calcul de la Jacobienne")
379         #
380         return _Jacobienne
381
382     # ---------------------------------------------------------
383     def TangentOperator(self, paire, **extraArgs ):
384         """
385         Tangent computation using the Jacobian.
386
387         NB: the extraArgs are there to ensure call compatibility, but they
388         must not be passed here to the user function.
389         """
390         if self.__mfEnabled:
391             assert len(paire) == 1, "Incorrect length of arguments"
392             _paire = paire[0]
393             assert len(_paire) == 2, "Incorrect number of arguments"
394         else:
395             assert len(paire) == 2, "Incorrect number of arguments"
396             _paire = paire
397         X, dX = _paire
398         _Jacobienne = self.TangentMatrix( X )
399         if dX is None or len(dX) == 0:
400             #
401             # Calcul de la forme matricielle si le second argument est None
402             # -------------------------------------------------------------
403             if self.__mfEnabled: return [_Jacobienne,]
404             else:                return _Jacobienne
405         else:
406             #
407             # Calcul de la valeur linéarisée de H en X appliqué à dX
408             # ------------------------------------------------------
409             _dX = numpy.asmatrix(numpy.ravel( dX )).T
410             _HtX = numpy.dot(_Jacobienne, _dX)
411             if self.__mfEnabled: return [_HtX.A1,]
412             else:                return _HtX.A1
413
414     # ---------------------------------------------------------
415     def AdjointOperator(self, paire, **extraArgs ):
416         """
417         Adjoint computation using the Jacobian.
418
419         NB: the extraArgs are there to ensure call compatibility, but they
420         must not be passed here to the user function.
421         """
422         if self.__mfEnabled:
423             assert len(paire) == 1, "Incorrect length of arguments"
424             _paire = paire[0]
425             assert len(_paire) == 2, "Incorrect number of arguments"
426         else:
427             assert len(paire) == 2, "Incorrect number of arguments"
428             _paire = paire
429         X, Y = _paire
430         _JacobienneT = self.TangentMatrix( X ).T
431         if Y is None or len(Y) == 0:
432             #
433             # Calcul de la forme matricielle si le second argument est None
434             # -------------------------------------------------------------
435             if self.__mfEnabled: return [_JacobienneT,]
436             else:                return _JacobienneT
437         else:
438             #
439             # Calcul de la valeur de l'adjoint en X appliqué à Y
440             # --------------------------------------------------
441             _Y = numpy.asmatrix(numpy.ravel( Y )).T
442             _HaY = numpy.dot(_JacobienneT, _Y)
443             if self.__mfEnabled: return [_HaY.A1,]
444             else:                return _HaY.A1
445
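# The following self-check is an illustrative sketch and not part of the original
# ADAO API: the hypothetical helper "_sketchFDApproximationUsage" and its toy
# operator "toyOperator" only document a typical use of FDApproximation, assuming
# the standard behaviour of Operator.appliedTo from daCore.BasicObjects, and
# compare the finite-difference Jacobian with its analytic counterpart.
def _sketchFDApproximationUsage():
    "Hedged usage sketch of FDApproximation on a toy quadratic operator"
    def toyOperator( x ):
        x = numpy.ravel( x )
        return numpy.array([x[0]**2, x[0]*x[1], 3.*x[1]])
    FDA = FDApproximation( Function = toyOperator, increment = 1.e-6 )
    X0  = numpy.array([1., 2.])
    J   = numpy.asarray( FDA.TangentMatrix( X0 ) )        # approximate Jacobian at X0
    Jexact = numpy.array([[2., 0.], [2., 1.], [0., 3.]])  # analytic Jacobian at X0
    return numpy.linalg.norm( J - Jexact )                # expected to be small
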
446 # ==============================================================================
447 def EnsembleOfCenteredPerturbations( _bgcenter, _bgcovariance, _nbmembers ):
448     "Génération d'un ensemble de taille _nbmembers-1 d'états aléatoires centrés"
449     #
450     _bgcenter = numpy.ravel(_bgcenter)[:,None]
451     if _nbmembers < 1:
452         raise ValueError("Number of members has to be strictly more than 1 (given number: %s)."%(str(_nbmembers),))
453     #
454     if _bgcovariance is None:
455         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
456     else:
457         _Z = numpy.random.multivariate_normal(numpy.zeros(_bgcenter.size), _bgcovariance, size=_nbmembers).T
458         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers) + _Z
459     #
460     return BackgroundEnsemble
461
462 # ==============================================================================
463 def EnsembleOfBackgroundPerturbations( _bgcenter, _bgcovariance, _nbmembers, _withSVD = True):
464     "Génération d'un ensemble de taille _nbmembers-1 d'états aléatoires centrés"
465     def __CenteredRandomAnomalies(Zr, N):
466         """
467         Generates a matrix of N centered random anomalies from Zr, following the
468         handwritten notes of MB and consistent with the code of PS with eps = -1
469         """
470         eps = -1
471         Q = numpy.identity(N-1)-numpy.ones((N-1,N-1))/numpy.sqrt(N)/(numpy.sqrt(N)-eps)
472         Q = numpy.concatenate((Q, [eps*numpy.ones(N-1)/numpy.sqrt(N)]), axis=0)
473         R, _ = numpy.linalg.qr(numpy.random.normal(size = (N-1,N-1)))
474         Q = numpy.dot(Q,R)
475         Zr = numpy.dot(Q,Zr)
476         return Zr.T
477     #
478     _bgcenter = numpy.ravel(_bgcenter).reshape((-1,1))
479     if _nbmembers < 1:
480         raise ValueError("Number of members has to be strictly more than 1 (given number: %s)."%(str(_nbmembers),))
481     if _bgcovariance is None:
482         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
483     else:
484         if _withSVD:
485             U, s, V = numpy.linalg.svd(_bgcovariance, full_matrices=False)
486             _nbctl = _bgcenter.size
487             if _nbmembers > _nbctl:
488                 _Z = numpy.concatenate((numpy.dot(
489                     numpy.diag(numpy.sqrt(s[:_nbctl])), V[:_nbctl]),
490                     numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1-_nbctl)), axis = 0)
491             else:
492                 _Z = numpy.dot(numpy.diag(numpy.sqrt(s[:_nbmembers-1])), V[:_nbmembers-1])
493             _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
494             BackgroundEnsemble = _bgcenter + _Zca
495         else:
496             if max(abs(_bgcovariance.flatten())) > 0:
497                 _nbctl = _bgcenter.size
498                 _Z = numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1)
499                 _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
500                 BackgroundEnsemble = _bgcenter + _Zca
501             else:
502                 BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
503     #
504     return BackgroundEnsemble
505
506 # ==============================================================================
507 def EnsembleMean( __Ensemble ):
508     "Renvoie la moyenne empirique d'un ensemble"
509     return numpy.asarray(__Ensemble).mean(axis=1, dtype=mfp).astype('float').reshape((-1,1))
510
511 # ==============================================================================
512 def EnsembleOfAnomalies( __Ensemble, __OptMean = None, __Normalisation = 1.):
513     "Renvoie les anomalies centrées à partir d'un ensemble"
514     if __OptMean is None:
515         __Em = EnsembleMean( __Ensemble )
516     else:
517         __Em = numpy.ravel( __OptMean ).reshape((-1,1))
518     #
519     return __Normalisation * (numpy.asarray( __Ensemble ) - __Em)
520
521 # ==============================================================================
522 def EnsembleErrorCovariance( __Ensemble, __quick = False ):
523     "Renvoie l'estimation empirique de la covariance d'ensemble"
524     if __quick:
525         # Covariance rapide mais rarement définie positive
526         __Covariance = numpy.cov( __Ensemble )
527     else:
528         # Résultat souvent identique à numpy.cov, mais plus robuste
529         __n, __m = numpy.asarray( __Ensemble ).shape
530         __Anomalies = EnsembleOfAnomalies( __Ensemble )
531         # Estimation empirique
532         __Covariance = ( __Anomalies @ __Anomalies.T ) / (__m-1)
533         # Assure la symétrie
534         __Covariance = ( __Covariance + __Covariance.T ) * 0.5
535         # Assure la positivité
536         __epsilon    = mpr*numpy.trace( __Covariance )
537         __Covariance = __Covariance + __epsilon * numpy.identity(__n)
538     #
539     return __Covariance
540
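# Illustrative sketch, not part of the original module: the hypothetical helper
# "_sketchEnsembleStatistics" shows how EnsembleMean, EnsembleOfAnomalies and
# EnsembleErrorCovariance fit together, assuming an ensemble stored with one
# state per column.
def _sketchEnsembleStatistics( __n = 3, __m = 50 ):
    "Hedged sketch: mean, anomalies and empirical covariance of a random ensemble"
    E  = numpy.random.randn(__n, __m)             # __m states of size __n, one per column
    Xm = EnsembleMean( E )                        # (__n,1) empirical mean
    A  = EnsembleOfAnomalies( E, Xm )             # centered anomalies, same shape as E
    P  = EnsembleErrorCovariance( E )             # (__n,__n) symmetrized, regularized covariance
    # By construction, P is close to A @ A.T / (__m-1), up to the positivity fix
    return numpy.linalg.norm( P - ( A @ A.T ) / (__m-1) )
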
541 # ==============================================================================
542 def EnsemblePerturbationWithGivenCovariance( __Ensemble, __Covariance, __Seed=None ):
543     "Ajout d'une perturbation à chaque membre d'un ensemble selon une covariance prescrite"
544     if hasattr(__Covariance,"assparsematrix"):
545         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance.assparsematrix())/abs(__Ensemble).mean() < mpr).all():
546             # Traitement d'une covariance nulle ou presque
547             return __Ensemble
548         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance.assparsematrix()) < mpr).all():
549             # Traitement d'une covariance nulle ou presque
550             return __Ensemble
551     else:
552         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance)/abs(__Ensemble).mean() < mpr).all():
553             # Traitement d'une covariance nulle ou presque
554             return __Ensemble
555         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance) < mpr).all():
556             # Traitement d'une covariance nulle ou presque
557             return __Ensemble
558     #
559     __n, __m = __Ensemble.shape
560     if __Seed is not None: numpy.random.seed(__Seed)
561     #
562     if hasattr(__Covariance,"isscalar") and __Covariance.isscalar():
563         # Traitement d'une covariance multiple de l'identité
564         __zero = 0.
565         __std  = numpy.sqrt(__Covariance.assparsematrix())
566         __Ensemble += numpy.random.normal(__zero, __std, size=(__m,__n)).T
567     #
568     elif hasattr(__Covariance,"isvector") and __Covariance.isvector():
569         # Traitement d'une covariance diagonale avec variances non identiques
570         __zero = numpy.zeros(__n)
571         __std  = numpy.sqrt(__Covariance.assparsematrix())
572         __Ensemble += numpy.asarray([numpy.random.normal(__zero, __std) for i in range(__m)]).T
573     #
574     elif hasattr(__Covariance,"ismatrix") and __Covariance.ismatrix():
575         # Traitement d'une covariance pleine
576         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance.asfullmatrix(__n), size=__m).T
577     #
578     elif isinstance(__Covariance, numpy.ndarray):
579         # Traitement d'une covariance numpy pleine, sachant qu'on arrive ici en dernier
580         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance, size=__m).T
581     #
582     else:
583         raise ValueError("Error in ensemble perturbation with inadequate covariance specification")
584     #
585     return __Ensemble
586
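# Illustrative sketch, not part of the original module: the hypothetical helper
# "_sketchEnsemblePerturbation" calls EnsemblePerturbationWithGivenCovariance
# with a plain numpy covariance matrix, which falls into the ndarray branch of
# the function above.
def _sketchEnsemblePerturbation( __n = 3, __m = 20 ):
    "Hedged sketch: perturbation of an ensemble with a full numpy covariance"
    E = numpy.zeros((__n, __m))                   # initial ensemble, one state per column
    Q = 0.1 * numpy.identity(__n)                 # prescribed error covariance
    E = EnsemblePerturbationWithGivenCovariance( E, Q, __Seed = 123456 )
    return EnsembleErrorCovariance( E )           # roughly recovers Q for large __m
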
587 # ==============================================================================
588 def CovarianceInflation(
589         InputCovOrEns,
590         InflationType   = None,
591         InflationFactor = None,
592         BackgroundCov   = None,
593         ):
594     """
595     Inflation applicable either to Pb or Pa, or to the ensembles EXb or EXa
596
597     Synthesis: Hunt 2007, section 2.3.5
598     """
599     if InflationFactor is None:
600         return InputCovOrEns
601     else:
602         InflationFactor = float(InflationFactor)
603     #
604     if InflationType in ["MultiplicativeOnAnalysisCovariance", "MultiplicativeOnBackgroundCovariance"]:
605         if InflationFactor < 1.:
606             raise ValueError("Inflation factor for multiplicative inflation has to be greater or equal than 1.")
607         if InflationFactor < 1.+mpr:
608             return InputCovOrEns
609         OutputCovOrEns = InflationFactor**2 * InputCovOrEns
610     #
611     elif InflationType in ["MultiplicativeOnAnalysisAnomalies", "MultiplicativeOnBackgroundAnomalies"]:
612         if InflationFactor < 1.:
613             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
614         if InflationFactor < 1.+mpr:
615             return InputCovOrEns
616         InputCovOrEnsMean = InputCovOrEns.mean(axis=1, dtype=mfp).astype('float')
617         OutputCovOrEns = InputCovOrEnsMean[:,numpy.newaxis] \
618             + InflationFactor * (InputCovOrEns - InputCovOrEnsMean[:,numpy.newaxis])
619     #
620     elif InflationType in ["AdditiveOnAnalysisCovariance", "AdditiveOnBackgroundCovariance"]:
621         if InflationFactor < 0.:
622             raise ValueError("Inflation factor for additive inflation has to be greater than or equal to 0.")
623         if InflationFactor < mpr:
624             return InputCovOrEns
625         __n, __m = numpy.asarray(InputCovOrEns).shape
626         if __n != __m:
627             raise ValueError("Additive inflation can only be applied to a square (covariance) matrix.")
628         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * numpy.identity(__n)
629     #
630     elif InflationType == "HybridOnBackgroundCovariance":
631         if InflationFactor < 0.:
632             raise ValueError("Inflation factor for hybrid inflation has to be greater than or equal to 0.")
633         if InflationFactor < mpr:
634             return InputCovOrEns
635         __n, __m = numpy.asarray(InputCovOrEns).shape
636         if __n != __m:
637             raise ValueError("Hybrid inflation can only be applied to a square (covariance) matrix.")
638         if BackgroundCov is None:
639             raise ValueError("Background covariance matrix B has to be given for hybrid inflation.")
640         if InputCovOrEns.shape != BackgroundCov.shape:
641             raise ValueError("Ensemble covariance matrix has to be of the same size as the background covariance matrix B.")
642         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * BackgroundCov
643     #
644     elif InflationType == "Relaxation":
645         raise NotImplementedError("InflationType Relaxation")
646     #
647     else:
648         raise ValueError("Error in inflation type, '%s' is not a valid keyword."%InflationType)
649     #
650     return OutputCovOrEns
651
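# Illustrative sketch, not part of the original module: the hypothetical helper
# "_sketchAnomaliesInflation" applies the "MultiplicativeOnBackgroundAnomalies"
# branch of CovarianceInflation to an ensemble and checks that the ensemble mean
# is preserved while the anomalies are scaled by the inflation factor.
def _sketchAnomaliesInflation( __n = 3, __m = 10, __factor = 1.2 ):
    "Hedged sketch: multiplicative inflation of ensemble anomalies"
    E  = numpy.random.randn(__n, __m)
    Ei = CovarianceInflation( E, "MultiplicativeOnBackgroundAnomalies", __factor )
    sameMean     = numpy.allclose( EnsembleMean( Ei ), EnsembleMean( E ) )
    scaledSpread = numpy.allclose( EnsembleOfAnomalies( Ei ), __factor * EnsembleOfAnomalies( E ) )
    return sameMean and scaledSpread
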
652 # ==============================================================================
653 def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
654     "Estimation des quantiles a posteriori (selfA est modifié)"
655     nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"]
656     #
657     # Traitement des bornes
658     if "StateBoundsForQuantiles" in selfA._parameters:
659         LBounds = selfA._parameters["StateBoundsForQuantiles"] # Prioritaire
660     elif "Bounds" in selfA._parameters:
661         LBounds = selfA._parameters["Bounds"]  # Défaut raisonnable
662     else:
663         LBounds = None
664     if LBounds is not None:
665         LBounds = ForceNumericBounds( LBounds )
666     _Xa = numpy.ravel(Xa)
667     #
668     # Échantillonnage des états
669     YfQ  = None
670     EXr  = None
671     for i in range(nbsamples):
672         if selfA._parameters["SimulationForQuantiles"] == "Linear" and HtM is not None and HXa is not None:
673             dXr = (numpy.random.multivariate_normal(_Xa,A) - _Xa).reshape((-1,1))
674             if LBounds is not None: # "EstimateProjection" par défaut
675                 dXr = numpy.max(numpy.hstack((dXr,LBounds[:,0].reshape((-1,1)) - _Xa.reshape((-1,1)))),axis=1).reshape((-1,1))
676                 dXr = numpy.min(numpy.hstack((dXr,LBounds[:,1].reshape((-1,1)) - _Xa.reshape((-1,1)))),axis=1).reshape((-1,1))
677             dYr = HtM @ dXr
678             Yr = HXa.reshape((-1,1)) + dYr
679             if selfA._toStore("SampledStateForQuantiles"): Xr = _Xa + numpy.ravel(dXr)
680         elif selfA._parameters["SimulationForQuantiles"] == "NonLinear" and Hm is not None:
681             Xr = numpy.random.multivariate_normal(_Xa,A)
682             if LBounds is not None: # "EstimateProjection" par défaut
683                 Xr = numpy.max(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,0].reshape((-1,1)))),axis=1)
684                 Xr = numpy.min(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,1].reshape((-1,1)))),axis=1)
685             Yr = Hm( Xr )
686         else:
687             raise ValueError("Quantile simulation has to be either Linear or NonLinear.")
688         #
689         if YfQ is None:
690             YfQ = Yr.reshape((-1,1))
691             if selfA._toStore("SampledStateForQuantiles"): EXr = Xr.reshape((-1,1))
692         else:
693             YfQ = numpy.hstack((YfQ,Yr.reshape((-1,1))))
694             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.hstack((EXr,Xr.reshape((-1,1))))
695     #
696     # Extraction des quantiles
697     YfQ.sort(axis=-1)
698     YQ = None
699     for quantile in selfA._parameters["Quantiles"]:
700         if not (0. <= float(quantile) <= 1.): continue
701         indice = int(nbsamples * float(quantile) - 1./nbsamples)
702         if YQ is None: YQ = YfQ[:,indice].reshape((-1,1))
703         else:          YQ = numpy.hstack((YQ,YfQ[:,indice].reshape((-1,1))))
704     if YQ is not None: # Liste non vide de quantiles
705         selfA.StoredVariables["SimulationQuantiles"].store( YQ )
706     if selfA._toStore("SampledStateForQuantiles"):
707         selfA.StoredVariables["SampledStateForQuantiles"].store( EXr )
708     #
709     return 0
710
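# Illustrative sketch, not part of the original module: the hypothetical helper
# "_sketchEmpiricalQuantile" reproduces, in isolation, the empirical quantile
# extraction used at the end of QuantilesEstimations: the samples are sorted and
# the value at index int(nbsamples*q - 1./nbsamples) is retained for a requested
# quantile q in [0,1].
def _sketchEmpiricalQuantile( __samples, __quantile ):
    "Hedged sketch: empirical quantile of a 1D sample, as in QuantilesEstimations"
    __Y = numpy.sort( numpy.ravel( __samples ) )
    __nbsamples = __Y.size
    __indice = int( __nbsamples * float(__quantile) - 1./__nbsamples )
    return __Y[__indice]
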
711 # ==============================================================================
712 def ForceNumericBounds( __Bounds ):
713     "Force les bornes à être des valeurs numériques, sauf si globalement None"
714     # Conserve une valeur par défaut à None s'il n'y a pas de bornes
715     if __Bounds is None: return None
716     # Converti toutes les bornes individuelles None à +/- l'infini
717     __Bounds = numpy.asarray( __Bounds, dtype=float )
718     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
719         raise ValueError("Incorrectly defined bounds data")
720     __Bounds[numpy.isnan(__Bounds)[:,0],0] = -sys.float_info.max
721     __Bounds[numpy.isnan(__Bounds)[:,1],1] =  sys.float_info.max
722     return __Bounds
723
724 # ==============================================================================
725 def RecentredBounds( __Bounds, __Center):
726     "Recentre les bornes autour de 0, sauf si globalement None"
727     # Conserve une valeur par défaut à None s'il n'y a pas de bornes
728     if __Bounds is None: return None
729     # Recentre les valeurs numériques de bornes
730     return ForceNumericBounds( __Bounds ) - numpy.ravel( __Center ).reshape((-1,1))
731
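# Illustrative sketch, not part of the original module: the hypothetical helper
# "_sketchBoundsHandling" shows ForceNumericBounds turning individual None bounds
# into +/- the largest float, and RecentredBounds (assumed here to return bounds
# on the increment around the given centre) recentring them.
def _sketchBoundsHandling():
    "Hedged sketch: numeric and recentred bounds"
    Bounds  = [[0., 10.], [None, 5.], [-2., None]]    # None means unbounded on that side
    Center  = [1., 2., 3.]
    BNum    = ForceNumericBounds( Bounds )            # None replaced by +/- sys.float_info.max
    BCenter = RecentredBounds( Bounds, Center )       # same bounds, expressed around Center
    return BNum, BCenter
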
732 # ==============================================================================
733 def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
734     """
735     Constrained Extended Kalman Filter
736     """
737     if selfA._parameters["EstimationOf"] == "Parameters":
738         selfA._parameters["StoreInternalVariables"] = True
739     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
740     #
741     # Opérateurs
742     H = HO["Direct"].appliedControledFormTo
743     #
744     if selfA._parameters["EstimationOf"] == "State":
745         M = EM["Direct"].appliedControledFormTo
746     #
747     if CM is not None and "Tangent" in CM and U is not None:
748         Cm = CM["Tangent"].asMatrix(Xb)
749     else:
750         Cm = None
751     #
752     # Durée d'observation et tailles
753     if hasattr(Y,"stepnumber"):
754         duration = Y.stepnumber()
755         __p = numpy.cumprod(Y.shape())[-1]
756     else:
757         duration = 2
758         __p = numpy.array(Y).size
759     #
760     # Précalcul des inversions de B et R
761     if selfA._parameters["StoreInternalVariables"] \
762         or selfA._toStore("CostFunctionJ") \
763         or selfA._toStore("CostFunctionJb") \
764         or selfA._toStore("CostFunctionJo") \
765         or selfA._toStore("CurrentOptimum") \
766         or selfA._toStore("APosterioriCovariance"):
767         BI = B.getI()
768         RI = R.getI()
769     #
770     __n = Xb.size
771     #
772     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
773         Xn = Xb
774         Pn = B
775         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
776         selfA.StoredVariables["Analysis"].store( Xb )
777         if selfA._toStore("APosterioriCovariance"):
778             if hasattr(B,"asfullmatrix"):
779                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
780             else:
781                 selfA.StoredVariables["APosterioriCovariance"].store( B )
782         selfA._setInternalState("seed", numpy.random.get_state())
783     elif selfA._parameters["nextStep"]:
784         Xn = selfA._getInternalState("Xn")
785         Pn = selfA._getInternalState("Pn")
786     #
787     if selfA._parameters["EstimationOf"] == "Parameters":
788         XaMin            = Xn
789         previousJMinimum = numpy.finfo(float).max
790     #
791     for step in range(duration-1):
792         if hasattr(Y,"store"):
793             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
794         else:
795             Ynpu = numpy.ravel( Y ).reshape((__p,1))
796         #
797         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
798         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
799         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
800         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
801         #
802         if selfA._parameters["EstimationOf"] == "State":
803             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
804             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
805             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
806             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
807         #
808         if U is not None:
809             if hasattr(U,"store") and len(U)>1:
810                 Un = numpy.ravel( U[step] ).reshape((-1,1))
811             elif hasattr(U,"store") and len(U)==1:
812                 Un = numpy.ravel( U[0] ).reshape((-1,1))
813             else:
814                 Un = numpy.ravel( U ).reshape((-1,1))
815         else:
816             Un = None
817         #
818         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
819             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
820             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
821         #
822         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
823             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
824             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
825                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
826                 Xn_predicted = Xn_predicted + Cm * Un
827             Pn_predicted = Q + Mt * Pn * Ma
828         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
829             # --- > Par principe, M = Id, Q = 0
830             Xn_predicted = Xn
831             Pn_predicted = Pn
832         #
833         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
834             Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
835             Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
836         #
837         if selfA._parameters["EstimationOf"] == "State":
838             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
839             _Innovation  = Ynpu - HX_predicted
840         elif selfA._parameters["EstimationOf"] == "Parameters":
841             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
842             _Innovation  = Ynpu - HX_predicted
843             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
844                 _Innovation = _Innovation - Cm * Un
845         #
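        # Analysis step of the (constrained) extended Kalman filter: Kalman gain,
        # state update from the innovation, and error covariance update, using the
        # tangent Ht and adjoint Ha of the observation operator evaluated at Xn.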
846         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
847         Xn = Xn_predicted + Kn * _Innovation
848         Pn = Pn_predicted - Kn * Ht * Pn_predicted
849         #
850         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
851             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
852             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
853         #
854         Xa = Xn # Pointeurs
855         #--------------------------
856         selfA._setInternalState("Xn", Xn)
857         selfA._setInternalState("Pn", Pn)
858         #--------------------------
859         #
860         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
861         # ---> avec analysis
862         selfA.StoredVariables["Analysis"].store( Xa )
863         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
864             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
865         if selfA._toStore("InnovationAtCurrentAnalysis"):
866             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
867         # ---> avec current state
868         if selfA._parameters["StoreInternalVariables"] \
869             or selfA._toStore("CurrentState"):
870             selfA.StoredVariables["CurrentState"].store( Xn )
871         if selfA._toStore("ForecastState"):
872             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
873         if selfA._toStore("ForecastCovariance"):
874             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
875         if selfA._toStore("BMA"):
876             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
877         if selfA._toStore("InnovationAtCurrentState"):
878             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
879         if selfA._toStore("SimulatedObservationAtCurrentState") \
880             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
881             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
882         # ---> autres
883         if selfA._parameters["StoreInternalVariables"] \
884             or selfA._toStore("CostFunctionJ") \
885             or selfA._toStore("CostFunctionJb") \
886             or selfA._toStore("CostFunctionJo") \
887             or selfA._toStore("CurrentOptimum") \
888             or selfA._toStore("APosterioriCovariance"):
889             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
890             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
891             J   = Jb + Jo
892             selfA.StoredVariables["CostFunctionJb"].store( Jb )
893             selfA.StoredVariables["CostFunctionJo"].store( Jo )
894             selfA.StoredVariables["CostFunctionJ" ].store( J )
895             #
896             if selfA._toStore("IndexOfOptimum") \
897                 or selfA._toStore("CurrentOptimum") \
898                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
899                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
900                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
901                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
902                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
903             if selfA._toStore("IndexOfOptimum"):
904                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
905             if selfA._toStore("CurrentOptimum"):
906                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
907             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
908                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
909             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
910                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
911             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
912                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
913             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
914                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
915         if selfA._toStore("APosterioriCovariance"):
916             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
917         if selfA._parameters["EstimationOf"] == "Parameters" \
918             and J < previousJMinimum:
919             previousJMinimum    = J
920             XaMin               = Xa
921             if selfA._toStore("APosterioriCovariance"):
922                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
923     #
924     # Stockage final supplémentaire de l'optimum en estimation de paramètres
925     # ----------------------------------------------------------------------
926     if selfA._parameters["EstimationOf"] == "Parameters":
927         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
928         selfA.StoredVariables["Analysis"].store( XaMin )
929         if selfA._toStore("APosterioriCovariance"):
930             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
931         if selfA._toStore("BMA"):
932             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
933     #
934     return 0
935
936 # ==============================================================================
937 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
938     """
939     Ensemble Kalman Smoother (EnKS)
940     """
941     #
942     # Opérateurs
943     H = HO["Direct"].appliedControledFormTo
944     #
945     if selfA._parameters["EstimationOf"] == "State":
946         M = EM["Direct"].appliedControledFormTo
947     #
948     if CM is not None and "Tangent" in CM and U is not None:
949         Cm = CM["Tangent"].asMatrix(Xb)
950     else:
951         Cm = None
952     #
953     # Précalcul des inversions de B et R
954     RIdemi = R.sqrtmI()
955     #
956     # Durée d'observation et tailles
957     LagL = selfA._parameters["SmootherLagL"]
958     if (not hasattr(Y,"store")) or (not hasattr(Y,"stepnumber")):
959         raise ValueError("Fixed-lag smoother requires a series of observations")
960     if Y.stepnumber() < LagL:
961         raise ValueError("Fixed-lag smoother requires a series of observations longer than the lag L")
962     duration = Y.stepnumber()
963     __p = numpy.cumprod(Y.shape())[-1]
964     __n = Xb.size
965     __m = selfA._parameters["NumberOfMembers"]
966     #
967     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
968         selfA.StoredVariables["Analysis"].store( Xb )
969         if selfA._toStore("APosterioriCovariance"):
970             if hasattr(B,"asfullmatrix"):
971                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
972             else:
973                 selfA.StoredVariables["APosterioriCovariance"].store( B )
974     #
975     # Calcul direct initial (on privilégie la mémorisation au recalcul)
976     __seed = numpy.random.get_state()
977     selfB = copy.deepcopy(selfA)
978     selfB._parameters["StoreSupplementaryCalculations"] = ["CurrentEnsembleState"]
979     if VariantM == "EnKS16-KalmanFilterFormula":
980         etkf(selfB, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM = "KalmanFilterFormula")
981     else:
982         raise ValueError("VariantM has to be chosen in the authorized methods list.")
983     if LagL > 0:
984         EL  = selfB.StoredVariables["CurrentEnsembleState"][LagL-1]
985     else:
986         EL = EnsembleOfBackgroundPerturbations( Xb, None, __m ) # Cf. etkf
987     selfA._parameters["SetSeed"] = numpy.random.set_state(__seed)
988     #
989     for step in range(LagL,duration-1):
990         #
991         sEL = selfB.StoredVariables["CurrentEnsembleState"][step+1-LagL:step+1]
992         sEL.append(None)
993         #
994         if hasattr(Y,"store"):
995             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
996         else:
997             Ynpu = numpy.ravel( Y ).reshape((__p,1))
998         #
999         if U is not None:
1000             if hasattr(U,"store") and len(U)>1:
1001                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1002             elif hasattr(U,"store") and len(U)==1:
1003                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1004             else:
1005                 Un = numpy.asmatrix(numpy.ravel( U )).T
1006         else:
1007             Un = None
1008         #
1009         #--------------------------
1010         if VariantM == "EnKS16-KalmanFilterFormula":
1011             if selfA._parameters["EstimationOf"] == "State": # Forecast
1012                 EL = M( [(EL[:,i], Un) for i in range(__m)],
1013                     argsAsSerie = True,
1014                     returnSerieAsArrayMatrix = True )
1015                 EL = EnsemblePerturbationWithGivenCovariance( EL, Q )
1016                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1017                     argsAsSerie = True,
1018                     returnSerieAsArrayMatrix = True )
1019                 if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
1020                     Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1021                     EZ = EZ + Cm * Un
1022             elif selfA._parameters["EstimationOf"] == "Parameters":
1023                 # --- > Par principe, M = Id, Q = 0
1024                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1025                     argsAsSerie = True,
1026                     returnSerieAsArrayMatrix = True )
1027             #
1028             vEm   = EL.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1029             vZm   = EZ.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1030             #
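            # ETKF analysis step in ensemble space: mS are the normalized
            # observation anomalies, delta the normalized innovation, mT the
            # transform matrix and vw the analysis weights of the ensemble mean.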
1031             mS    = RIdemi @ EnsembleOfAnomalies( EZ, vZm, 1./math.sqrt(__m-1) )
1032             mS    = mS.reshape((-1,__m)) # Pour dimension 1
1033             delta = RIdemi @ ( Ynpu - vZm )
1034             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1035             vw    = mT @ mS.T @ delta
1036             #
1037             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1038             mU    = numpy.identity(__m)
1039             wTU   = (vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU)
1040             #
1041             EX    = EnsembleOfAnomalies( EL, vEm, 1./math.sqrt(__m-1) )
1042             EL    = vEm + EX @ wTU
1043             #
1044             sEL[LagL] = EL
1045             for irl in range(LagL): # Lissage des L précédentes analysis
1046                 vEm = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1047                 EX = EnsembleOfAnomalies( sEL[irl], vEm, 1./math.sqrt(__m-1) )
1048                 sEL[irl] = vEm + EX @ wTU
1049             #
1050             # Conservation de l'analyse retrospective d'ordre 0 avant rotation
1051             Xa = sEL[0].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1052             if selfA._toStore("APosterioriCovariance"):
1053                 EXn = sEL[0]
1054             #
1055             for irl in range(LagL):
1056                 sEL[irl] = sEL[irl+1]
1057             sEL[LagL] = None
1058         #--------------------------
1059         else:
1060             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1061         #
1062         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1063         # ---> avec analysis
1064         selfA.StoredVariables["Analysis"].store( Xa )
1065         if selfA._toStore("APosterioriCovariance"):
1066             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(EXn) )
1067     #
1068     # Stockage des dernières analyses incomplètement remises à jour
1069     for irl in range(LagL):
1070         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1071         Xa = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1072         selfA.StoredVariables["Analysis"].store( Xa )
1073     #
1074     return 0
1075
1076 # ==============================================================================
1077 def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
1078     """
1079     Ensemble-Transform EnKF
1080     """
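    # Implementation note: VariantM selects the flavour of the analysis step below.
    # "KalmanFilterFormula" applies the deterministic square-root transform in ensemble
    # space, while "Variational" and the "FiniteSize11/15/16" variants recover the update
    # by minimizing an ensemble-space cost function, the finite-size ones using the
    # Boc2011/Boc2015/Boc2016 gauges indicated in the corresponding branches.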
1081     if selfA._parameters["EstimationOf"] == "Parameters":
1082         selfA._parameters["StoreInternalVariables"] = True
1083     #
1084     # Operators
1085     H = HO["Direct"].appliedControledFormTo
1086     #
1087     if selfA._parameters["EstimationOf"] == "State":
1088         M = EM["Direct"].appliedControledFormTo
1089     #
1090     if CM is not None and "Tangent" in CM and U is not None:
1091         Cm = CM["Tangent"].asMatrix(Xb)
1092     else:
1093         Cm = None
1094     #
1095     # Observation duration and sizes
1096     if hasattr(Y,"stepnumber"):
1097         duration = Y.stepnumber()
1098         __p = numpy.cumprod(Y.shape())[-1]
1099     else:
1100         duration = 2
1101         __p = numpy.array(Y).size
1102     #
1103     # Precomputation of the B and R inverses
1104     if selfA._parameters["StoreInternalVariables"] \
1105         or selfA._toStore("CostFunctionJ") \
1106         or selfA._toStore("CostFunctionJb") \
1107         or selfA._toStore("CostFunctionJo") \
1108         or selfA._toStore("CurrentOptimum") \
1109         or selfA._toStore("APosterioriCovariance"):
1110         BI = B.getI()
1111         RI = R.getI()
1112     elif VariantM != "KalmanFilterFormula":
1113         RI = R.getI()
1114     if VariantM == "KalmanFilterFormula":
1115         RIdemi = R.sqrtmI()
1116     #
1117     __n = Xb.size
1118     __m = selfA._parameters["NumberOfMembers"]
1119     #
1120     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1121         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
1122         selfA.StoredVariables["Analysis"].store( Xb )
1123         if selfA._toStore("APosterioriCovariance"):
1124             if hasattr(B,"asfullmatrix"):
1125                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1126             else:
1127                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1128         selfA._setInternalState("seed", numpy.random.get_state())
1129     elif selfA._parameters["nextStep"]:
1130         Xn = selfA._getInternalState("Xn")
1131     #
1132     previousJMinimum = numpy.finfo(float).max
1133     #
1134     for step in range(duration-1):
1135         numpy.random.set_state(selfA._getInternalState("seed"))
1136         if hasattr(Y,"store"):
1137             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1138         else:
1139             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1140         #
1141         if U is not None:
1142             if hasattr(U,"store") and len(U)>1:
1143                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1144             elif hasattr(U,"store") and len(U)==1:
1145                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1146             else:
1147                 Un = numpy.asmatrix(numpy.ravel( U )).T
1148         else:
1149             Un = None
1150         #
1151         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1152             Xn = CovarianceInflation( Xn,
1153                 selfA._parameters["InflationType"],
1154                 selfA._parameters["InflationFactor"],
1155                 )
1156         #
1157         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1158             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
1159                 argsAsSerie = True,
1160                 returnSerieAsArrayMatrix = True )
1161             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
1162             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1163                 argsAsSerie = True,
1164                 returnSerieAsArrayMatrix = True )
1165             if Cm is not None and Un is not None: # Caution: if Cm is also included in M, it is counted twice!
1166                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1167                 Xn_predicted = Xn_predicted + Cm * Un
1168         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1169             # --- > By principle, M = Id, Q = 0
1170             Xn_predicted = EMX = Xn
1171             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1172                 argsAsSerie = True,
1173                 returnSerieAsArrayMatrix = True )
1174         #
1175         # Mean of forecast and observation of forecast
1176         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1177         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1178         #
1179         # Anomalies
1180         EaX   = EnsembleOfAnomalies( Xn_predicted, Xfm )
1181         EaHX  = EnsembleOfAnomalies( HX_predicted, Hfm)
1182         #
1183         #--------------------------
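        # The "KalmanFilterFormula" branch below is the deterministic ensemble transform:
        #   S = R^(-1/2) (HE - Hfm) / sqrt(m-1),   T = (I + S^T S)^(-1),
        #   w = T S^T R^(-1/2) (y - Hfm),
        # and the analysis ensemble is rebuilt as Xfm + Ea (w + sqrt(m-1) T^(1/2)),
        # with Ea the forecast anomalies normalized by sqrt(m-1).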
1184         if VariantM == "KalmanFilterFormula":
1185             mS    = RIdemi * EaHX / math.sqrt(__m-1)
1186             mS    = mS.reshape((-1,__m)) # For dimension 1
1187             delta = RIdemi * ( Ynpu - Hfm )
1188             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1189             vw    = mT @ mS.T @ delta
1190             #
1191             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1192             mU    = numpy.identity(__m)
1193             #
1194             EaX   = EaX / math.sqrt(__m-1)
1195             Xn    = Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU )
1196         #--------------------------
1197         elif VariantM == "Variational":
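            # Same analysis as above, but obtained by explicitly minimizing the
            # ensemble-space cost function
            #   J(w) = 1/2 d(w)^T R^(-1) d(w) + (m-1)/2 w^T w,  d(w) = y - H(xf) - (HA) w,
            # with a CG solver; the analysis anomalies are then rebuilt from the inverse
            # Hessian Pta through its matrix square root EWa.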
1198             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1199             def CostFunction(w):
1200                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1201                 _Jo = 0.5 * _A.T @ (RI * _A)
1202                 _Jb = 0.5 * (__m-1) * w.T @ w
1203                 _J  = _Jo + _Jb
1204                 return float(_J)
1205             def GradientOfCostFunction(w):
1206                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1207                 _GradJo = - EaHX.T @ (RI * _A)
1208                 _GradJb = (__m-1) * w.reshape((__m,1))
1209                 _GradJ  = _GradJo + _GradJb
1210                 return numpy.ravel(_GradJ)
1211             vw = scipy.optimize.fmin_cg(
1212                 f           = CostFunction,
1213                 x0          = numpy.zeros(__m),
1214                 fprime      = GradientOfCostFunction,
1215                 args        = (),
1216                 disp        = False,
1217                 )
1218             #
1219             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1220             Htb = (__m-1) * numpy.identity(__m)
1221             Hta = Hto + Htb
1222             #
1223             Pta = numpy.linalg.inv( Hta )
1224             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1225             #
1226             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1227         #--------------------------
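        # The "FiniteSize" variants below (often referred to as EnKF-N) replace the
        # Gaussian background term (m-1)/2 w^T w by a finite-size term of the form
        # c/2 * log(1 + 1/m + w^T w [/(m-1)]), the constant c and the scaling depending
        # on the Boc2011, Boc2015 or Boc2016 gauge used in each branch.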
1228         elif VariantM == "FiniteSize11": # Boc2011 gauge
1229             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1230             def CostFunction(w):
1231                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1232                 _Jo = 0.5 * _A.T @ (RI * _A)
1233                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
1234                 _J  = _Jo + _Jb
1235                 return float(_J)
1236             def GradientOfCostFunction(w):
1237                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1238                 _GradJo = - EaHX.T @ (RI * _A)
1239                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1240                 _GradJ  = _GradJo + _GradJb
1241                 return numpy.ravel(_GradJ)
1242             vw = scipy.optimize.fmin_cg(
1243                 f           = CostFunction,
1244                 x0          = numpy.zeros(__m),
1245                 fprime      = GradientOfCostFunction,
1246                 args        = (),
1247                 disp        = False,
1248                 )
1249             #
1250             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1251             Htb = __m * \
1252                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1253                 / (1 + 1/__m + vw.T @ vw)**2
1254             Hta = Hto + Htb
1255             #
1256             Pta = numpy.linalg.inv( Hta )
1257             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1258             #
1259             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1260         #--------------------------
1261         elif VariantM == "FiniteSize15": # Boc2015 gauge
1262             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1263             def CostFunction(w):
1264                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1265                 _Jo = 0.5 * _A.T @ (RI * _A)
1266                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w)
1267                 _J  = _Jo + _Jb
1268                 return float(_J)
1269             def GradientOfCostFunction(w):
1270                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1271                 _GradJo = - EaHX.T @ (RI * _A)
1272                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1273                 _GradJ  = _GradJo + _GradJb
1274                 return numpy.ravel(_GradJ)
1275             vw = scipy.optimize.fmin_cg(
1276                 f           = CostFunction,
1277                 x0          = numpy.zeros(__m),
1278                 fprime      = GradientOfCostFunction,
1279                 args        = (),
1280                 disp        = False,
1281                 )
1282             #
1283             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1284             Htb = (__m+1) * \
1285                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1286                 / (1 + 1/__m + vw.T @ vw)**2
1287             Hta = Hto + Htb
1288             #
1289             Pta = numpy.linalg.inv( Hta )
1290             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1291             #
1292             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1293         #--------------------------
1294         elif VariantM == "FiniteSize16": # Boc2016 gauge
1295             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1296             def CostFunction(w):
1297                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1298                 _Jo = 0.5 * _A.T @ (RI * _A)
1299                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
1300                 _J  = _Jo + _Jb
1301                 return float(_J)
1302             def GradientOfCostFunction(w):
1303                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1304                 _GradJo = - EaHX.T @ (RI * _A)
1305                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
1306                 _GradJ  = _GradJo + _GradJb
1307                 return numpy.ravel(_GradJ)
1308             vw = scipy.optimize.fmin_cg(
1309                 f           = CostFunction,
1310                 x0          = numpy.zeros(__m),
1311                 fprime      = GradientOfCostFunction,
1312                 args        = (),
1313                 disp        = False,
1314                 )
1315             #
1316             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1317             Htb = ((__m+1) / (__m-1)) * \
1318                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.identity(__m) - 2 * vw @ vw.T / (__m-1) ) \
1319                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
1320             Hta = Hto + Htb
1321             #
1322             Pta = numpy.linalg.inv( Hta )
1323             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1324             #
1325             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1326         #--------------------------
1327         else:
1328             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1329         #
1330         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1331             Xn = CovarianceInflation( Xn,
1332                 selfA._parameters["InflationType"],
1333                 selfA._parameters["InflationFactor"],
1334                 )
1335         #
1336         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1337         #--------------------------
1338         selfA._setInternalState("Xn", Xn)
1339         selfA._setInternalState("seed", numpy.random.get_state())
1340         #--------------------------
1341         #
1342         if selfA._parameters["StoreInternalVariables"] \
1343             or selfA._toStore("CostFunctionJ") \
1344             or selfA._toStore("CostFunctionJb") \
1345             or selfA._toStore("CostFunctionJo") \
1346             or selfA._toStore("APosterioriCovariance") \
1347             or selfA._toStore("InnovationAtCurrentAnalysis") \
1348             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1349             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1350             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1351             _Innovation = Ynpu - _HXa
1352         #
1353         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1354         # ---> with analysis
1355         selfA.StoredVariables["Analysis"].store( Xa )
1356         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1357             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1358         if selfA._toStore("InnovationAtCurrentAnalysis"):
1359             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1360         # ---> with current state
1361         if selfA._parameters["StoreInternalVariables"] \
1362             or selfA._toStore("CurrentState"):
1363             selfA.StoredVariables["CurrentState"].store( Xn )
1364         if selfA._toStore("ForecastState"):
1365             selfA.StoredVariables["ForecastState"].store( EMX )
1366         if selfA._toStore("ForecastCovariance"):
1367             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
1368         if selfA._toStore("BMA"):
1369             selfA.StoredVariables["BMA"].store( EMX - Xa )
1370         if selfA._toStore("InnovationAtCurrentState"):
1371             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
1372         if selfA._toStore("SimulatedObservationAtCurrentState") \
1373             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1374             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1375         # ---> others
1376         if selfA._parameters["StoreInternalVariables"] \
1377             or selfA._toStore("CostFunctionJ") \
1378             or selfA._toStore("CostFunctionJb") \
1379             or selfA._toStore("CostFunctionJo") \
1380             or selfA._toStore("CurrentOptimum") \
1381             or selfA._toStore("APosterioriCovariance"):
1382             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1383             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1384             J   = Jb + Jo
1385             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1386             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1387             selfA.StoredVariables["CostFunctionJ" ].store( J )
1388             #
1389             if selfA._toStore("IndexOfOptimum") \
1390                 or selfA._toStore("CurrentOptimum") \
1391                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1392                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1393                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1394                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1395                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1396             if selfA._toStore("IndexOfOptimum"):
1397                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1398             if selfA._toStore("CurrentOptimum"):
1399                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1400             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1401                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1402             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1403                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1404             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1405                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1406             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1407                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1408         if selfA._toStore("APosterioriCovariance"):
1409             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1410         if selfA._parameters["EstimationOf"] == "Parameters" \
1411             and J < previousJMinimum:
1412             previousJMinimum    = J
1413             XaMin               = Xa
1414             if selfA._toStore("APosterioriCovariance"):
1415                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1416         # ---> For the smoothers
1417         if selfA._toStore("CurrentEnsembleState"):
1418             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1419     #
1420     # Additional final storage of the optimum in parameter estimation
1421     # ----------------------------------------------------------------------
1422     if selfA._parameters["EstimationOf"] == "Parameters":
1423         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1424         selfA.StoredVariables["Analysis"].store( XaMin )
1425         if selfA._toStore("APosterioriCovariance"):
1426             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1427         if selfA._toStore("BMA"):
1428             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1429     #
1430     return 0
1431
1432 # ==============================================================================
1433 def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1434     """
1435     Extended Kalman Filter
1436     """
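    # Recursions actually coded below, with (Mt, Ma) and (Ht, Ha) the tangent and adjoint
    # operators evaluated at the current state:
    #   forecast : Xf = M(Xn, Un) (+ Cm Un),   Pf = Q + Mt Pn Ma
    #   gain     : K  = Pf Ha (R + Ht Pf Ha)^(-1)
    #   analysis : Xn = Xf + K (y - H(Xf)),    Pn = Pf - K Ht Pf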
1437     if selfA._parameters["EstimationOf"] == "Parameters":
1438         selfA._parameters["StoreInternalVariables"] = True
1439     #
1440     # Operators
1441     H = HO["Direct"].appliedControledFormTo
1442     #
1443     if selfA._parameters["EstimationOf"] == "State":
1444         M = EM["Direct"].appliedControledFormTo
1445     #
1446     if CM is not None and "Tangent" in CM and U is not None:
1447         Cm = CM["Tangent"].asMatrix(Xb)
1448     else:
1449         Cm = None
1450     #
1451     # Observation duration and sizes
1452     if hasattr(Y,"stepnumber"):
1453         duration = Y.stepnumber()
1454         __p = numpy.cumprod(Y.shape())[-1]
1455     else:
1456         duration = 2
1457         __p = numpy.array(Y).size
1458     #
1459     # Precomputation of the B and R inverses
1460     if selfA._parameters["StoreInternalVariables"] \
1461         or selfA._toStore("CostFunctionJ") \
1462         or selfA._toStore("CostFunctionJb") \
1463         or selfA._toStore("CostFunctionJo") \
1464         or selfA._toStore("CurrentOptimum") \
1465         or selfA._toStore("APosterioriCovariance"):
1466         BI = B.getI()
1467         RI = R.getI()
1468     #
1469     __n = Xb.size
1470     #
1471     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1472         Xn = Xb
1473         Pn = B
1474         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1475         selfA.StoredVariables["Analysis"].store( Xb )
1476         if selfA._toStore("APosterioriCovariance"):
1477             if hasattr(B,"asfullmatrix"):
1478                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1479             else:
1480                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1481         selfA._setInternalState("seed", numpy.random.get_state())
1482     elif selfA._parameters["nextStep"]:
1483         Xn = selfA._getInternalState("Xn")
1484         Pn = selfA._getInternalState("Pn")
1485     #
1486     if selfA._parameters["EstimationOf"] == "Parameters":
1487         XaMin            = Xn
1488         previousJMinimum = numpy.finfo(float).max
1489     #
1490     for step in range(duration-1):
1491         if hasattr(Y,"store"):
1492             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1493         else:
1494             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1495         #
1496         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1497         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1498         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1499         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1500         #
1501         if selfA._parameters["EstimationOf"] == "State":
1502             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1503             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1504             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1505             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1506         #
1507         if U is not None:
1508             if hasattr(U,"store") and len(U)>1:
1509                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1510             elif hasattr(U,"store") and len(U)==1:
1511                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1512             else:
1513                 Un = numpy.ravel( U ).reshape((-1,1))
1514         else:
1515             Un = None
1516         #
1517         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1518             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1519             if Cm is not None and Un is not None: # Caution: if Cm is also included in M, it is counted twice!
1520                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1521                 Xn_predicted = Xn_predicted + Cm * Un
1522             Pn_predicted = Q + Mt * Pn * Ma
1523         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1524             # --- > By principle, M = Id, Q = 0
1525             Xn_predicted = Xn
1526             Pn_predicted = Pn
1527         #
1528         if selfA._parameters["EstimationOf"] == "State":
1529             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1530             _Innovation  = Ynpu - HX_predicted
1531         elif selfA._parameters["EstimationOf"] == "Parameters":
1532             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1533             _Innovation  = Ynpu - HX_predicted
1534             if Cm is not None and Un is not None: # Caution: if Cm is also included in H, it is counted twice!
1535                 _Innovation = _Innovation - Cm * Un
1536         #
1537         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1538         Xn = Xn_predicted + Kn * _Innovation
1539         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1540         #
1541         Xa = Xn # Pointers
1542         #--------------------------
1543         selfA._setInternalState("Xn", Xn)
1544         selfA._setInternalState("Pn", Pn)
1545         #--------------------------
1546         #
1547         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1548         # ---> with analysis
1549         selfA.StoredVariables["Analysis"].store( Xa )
1550         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1551             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1552         if selfA._toStore("InnovationAtCurrentAnalysis"):
1553             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1554         # ---> with current state
1555         if selfA._parameters["StoreInternalVariables"] \
1556             or selfA._toStore("CurrentState"):
1557             selfA.StoredVariables["CurrentState"].store( Xn )
1558         if selfA._toStore("ForecastState"):
1559             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1560         if selfA._toStore("ForecastCovariance"):
1561             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1562         if selfA._toStore("BMA"):
1563             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1564         if selfA._toStore("InnovationAtCurrentState"):
1565             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1566         if selfA._toStore("SimulatedObservationAtCurrentState") \
1567             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1568             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1569         # ---> others
1570         if selfA._parameters["StoreInternalVariables"] \
1571             or selfA._toStore("CostFunctionJ") \
1572             or selfA._toStore("CostFunctionJb") \
1573             or selfA._toStore("CostFunctionJo") \
1574             or selfA._toStore("CurrentOptimum") \
1575             or selfA._toStore("APosterioriCovariance"):
1576             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1577             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1578             J   = Jb + Jo
1579             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1580             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1581             selfA.StoredVariables["CostFunctionJ" ].store( J )
1582             #
1583             if selfA._toStore("IndexOfOptimum") \
1584                 or selfA._toStore("CurrentOptimum") \
1585                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1586                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1587                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1588                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1589                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1590             if selfA._toStore("IndexOfOptimum"):
1591                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1592             if selfA._toStore("CurrentOptimum"):
1593                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1594             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1595                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1596             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1597                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1598             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1599                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1600             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1601                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1602         if selfA._toStore("APosterioriCovariance"):
1603             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1604         if selfA._parameters["EstimationOf"] == "Parameters" \
1605             and J < previousJMinimum:
1606             previousJMinimum    = J
1607             XaMin               = Xa
1608             if selfA._toStore("APosterioriCovariance"):
1609                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1610     #
1611     # Additional final storage of the optimum in parameter estimation
1612     # ----------------------------------------------------------------------
1613     if selfA._parameters["EstimationOf"] == "Parameters":
1614         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1615         selfA.StoredVariables["Analysis"].store( XaMin )
1616         if selfA._toStore("APosterioriCovariance"):
1617             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1618         if selfA._toStore("BMA"):
1619             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1620     #
1621     return 0
1622
1623 # ==============================================================================
1624 def ienkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="IEnKF12",
1625     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1626     """
1627     Iterative EnKF
1628     """
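    # The "IEnKF12" variant below performs, at each time step, a Gauss-Newton iteration in
    # the m-dimensional ensemble-weight space, stopping when ||Deltaw|| < _e or after _jmax
    # iterations. With BnotT the anomalies are handled in "bundle" mode (finite differences
    # of size _epsilon), otherwise in "transform" mode through the matrix Ta.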
1629     if selfA._parameters["EstimationOf"] == "Parameters":
1630         selfA._parameters["StoreInternalVariables"] = True
1631     #
1632     # Operators
1633     H = HO["Direct"].appliedControledFormTo
1634     #
1635     if selfA._parameters["EstimationOf"] == "State":
1636         M = EM["Direct"].appliedControledFormTo
1637     #
1638     if CM is not None and "Tangent" in CM and U is not None:
1639         Cm = CM["Tangent"].asMatrix(Xb)
1640     else:
1641         Cm = None
1642     #
1643     # Observation duration and sizes
1644     if hasattr(Y,"stepnumber"):
1645         duration = Y.stepnumber()
1646         __p = numpy.cumprod(Y.shape())[-1]
1647     else:
1648         duration = 2
1649         __p = numpy.array(Y).size
1650     #
1651     # Precomputation of the B and R inverses
1652     if selfA._parameters["StoreInternalVariables"] \
1653         or selfA._toStore("CostFunctionJ") \
1654         or selfA._toStore("CostFunctionJb") \
1655         or selfA._toStore("CostFunctionJo") \
1656         or selfA._toStore("CurrentOptimum") \
1657         or selfA._toStore("APosterioriCovariance"):
1658         BI = B.getI()
1659     RI = R.getI()
1660     #
1661     __n = Xb.size
1662     __m = selfA._parameters["NumberOfMembers"]
1663     #
1664     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1665         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
1666         else:                         Pn = B
1667         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
1668         selfA.StoredVariables["Analysis"].store( Xb )
1669         if selfA._toStore("APosterioriCovariance"):
1670             if hasattr(B,"asfullmatrix"):
1671                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1672             else:
1673                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1674         selfA._setInternalState("seed", numpy.random.get_state())
1675     elif selfA._parameters["nextStep"]:
1676         Xn = selfA._getInternalState("Xn")
1677     #
1678     previousJMinimum = numpy.finfo(float).max
1679     #
1680     for step in range(duration-1):
1681         numpy.random.set_state(selfA._getInternalState("seed"))
1682         if hasattr(Y,"store"):
1683             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1684         else:
1685             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1686         #
1687         if U is not None:
1688             if hasattr(U,"store") and len(U)>1:
1689                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1690             elif hasattr(U,"store") and len(U)==1:
1691                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1692             else:
1693                 Un = numpy.asmatrix(numpy.ravel( U )).T
1694         else:
1695             Un = None
1696         #
1697         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1698             Xn = CovarianceInflation( Xn,
1699                 selfA._parameters["InflationType"],
1700                 selfA._parameters["InflationFactor"],
1701                 )
1702         #
1703         #--------------------------
1704         if VariantM == "IEnKF12":
1705             Xfm = numpy.ravel(Xn.mean(axis=1, dtype=mfp).astype('float'))
1706             EaX = EnsembleOfAnomalies( Xn ) / math.sqrt(__m-1)
1707             __j = 0
1708             Deltaw = 1
1709             if not BnotT:
1710                 Ta  = numpy.identity(__m)
1711             vw  = numpy.zeros(__m)
1712             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
1713                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
1714                 #
1715                 if BnotT:
1716                     E1 = vx1 + _epsilon * EaX
1717                 else:
1718                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
1719                 #
1720                 if selfA._parameters["EstimationOf"] == "State": # Forecast + Q
1721                     E2 = M( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
1722                         argsAsSerie = True,
1723                         returnSerieAsArrayMatrix = True )
1724                 elif selfA._parameters["EstimationOf"] == "Parameters":
1725                     # --- > By principle, M = Id
1726                     E2 = Xn
1727                 vx2 = E2.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1728                 vy1 = H((vx2, Un)).reshape((__p,1))
1729                 #
1730                 HE2 = H( [(E2[:,i,numpy.newaxis], Un) for i in range(__m)],
1731                     argsAsSerie = True,
1732                     returnSerieAsArrayMatrix = True )
1733                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1734                 #
1735                 if BnotT:
1736                     EaY = (HE2 - vy2) / _epsilon
1737                 else:
1738                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
1739                 #
1740                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy1 )))
1741                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
1742                 Deltaw = - numpy.linalg.solve(mH,GradJ)
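                # GradJ is the gradient of the weight-space cost function and mH its
                # Gauss-Newton Hessian I + (HA)^T R^(-1) (HA), so Deltaw is one Newton
                # step on the weights vw.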
1743                 #
1744                 vw = vw + Deltaw
1745                 #
1746                 if not BnotT:
1747                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1748                 #
1749                 __j = __j + 1
1750             #
1751             A2 = EnsembleOfAnomalies( E2 )
1752             #
1753             if BnotT:
1754                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1755                 A2 = math.sqrt(__m-1) * A2 @ Ta / _epsilon
1756             #
1757             Xn = vx2 + A2
1758         #--------------------------
1759         else:
1760             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1761         #
1762         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1763             Xn = CovarianceInflation( Xn,
1764                 selfA._parameters["InflationType"],
1765                 selfA._parameters["InflationFactor"],
1766                 )
1767         #
1768         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1769         #--------------------------
1770         selfA._setInternalState("Xn", Xn)
1771         selfA._setInternalState("seed", numpy.random.get_state())
1772         #--------------------------
1773         #
1774         if selfA._parameters["StoreInternalVariables"] \
1775             or selfA._toStore("CostFunctionJ") \
1776             or selfA._toStore("CostFunctionJb") \
1777             or selfA._toStore("CostFunctionJo") \
1778             or selfA._toStore("APosterioriCovariance") \
1779             or selfA._toStore("InnovationAtCurrentAnalysis") \
1780             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1781             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1782             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1783             _Innovation = Ynpu - _HXa
1784         #
1785         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1786         # ---> with analysis
1787         selfA.StoredVariables["Analysis"].store( Xa )
1788         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1789             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1790         if selfA._toStore("InnovationAtCurrentAnalysis"):
1791             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1792         # ---> with current state
1793         if selfA._parameters["StoreInternalVariables"] \
1794             or selfA._toStore("CurrentState"):
1795             selfA.StoredVariables["CurrentState"].store( Xn )
1796         if selfA._toStore("ForecastState"):
1797             selfA.StoredVariables["ForecastState"].store( E2 )
1798         if selfA._toStore("ForecastCovariance"):
1799             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(E2) )
1800         if selfA._toStore("BMA"):
1801             selfA.StoredVariables["BMA"].store( E2 - Xa )
1802         if selfA._toStore("InnovationAtCurrentState"):
1803             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
1804         if selfA._toStore("SimulatedObservationAtCurrentState") \
1805             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1806             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
1807         # ---> others
1808         if selfA._parameters["StoreInternalVariables"] \
1809             or selfA._toStore("CostFunctionJ") \
1810             or selfA._toStore("CostFunctionJb") \
1811             or selfA._toStore("CostFunctionJo") \
1812             or selfA._toStore("CurrentOptimum") \
1813             or selfA._toStore("APosterioriCovariance"):
1814             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1815             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1816             J   = Jb + Jo
1817             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1818             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1819             selfA.StoredVariables["CostFunctionJ" ].store( J )
1820             #
1821             if selfA._toStore("IndexOfOptimum") \
1822                 or selfA._toStore("CurrentOptimum") \
1823                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1824                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1825                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1826                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1827                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1828             if selfA._toStore("IndexOfOptimum"):
1829                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1830             if selfA._toStore("CurrentOptimum"):
1831                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1832             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1833                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1834             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1835                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1836             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1837                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1838             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1839                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1840         if selfA._toStore("APosterioriCovariance"):
1841             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1842         if selfA._parameters["EstimationOf"] == "Parameters" \
1843             and J < previousJMinimum:
1844             previousJMinimum    = J
1845             XaMin               = Xa
1846             if selfA._toStore("APosterioriCovariance"):
1847                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1848         # ---> For the smoothers
1849         if selfA._toStore("CurrentEnsembleState"):
1850             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1851     #
1852     # Additional final storage of the optimum in parameter estimation
1853     # ----------------------------------------------------------------------
1854     if selfA._parameters["EstimationOf"] == "Parameters":
1855         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1856         selfA.StoredVariables["Analysis"].store( XaMin )
1857         if selfA._toStore("APosterioriCovariance"):
1858             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1859         if selfA._toStore("BMA"):
1860             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1861     #
1862     return 0
1863
1864 # ==============================================================================
1865 def incr3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1866     """
1867     Incremental 3DVAR
1868     """
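    # Structure: the outer loop re-linearizes the observation operator around the current
    # iterate Xr (tangent matrix Ht), the inner loop minimizes the resulting quadratic cost
    # in the increment dx with the selected optimizer, and the outer loop stops when
    # |DeltaJ| falls below CostDecrementTolerance or after MaximumNumberOfSteps iterations.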
1869     #
1870     # Initializations
1871     # ---------------
1872     #
1873     # Non-linear operator for the outer loop
1874     Hm = HO["Direct"].appliedTo
1875     #
1876     # Precomputation of the B and R inverses
1877     BI = B.getI()
1878     RI = R.getI()
1879     #
1880     # Starting point of the optimization
1881     Xini = selfA._parameters["InitializationPoint"]
1882     #
1883     HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
1884     Innovation = Y - HXb
1885     #
1886     # Outer Loop
1887     # ----------
1888     iOuter = 0
1889     J      = 1./mpr
1890     DeltaJ = 1./mpr
1891     Xr     = Xini.reshape((-1,1))
1892     while abs(DeltaJ) >= selfA._parameters["CostDecrementTolerance"] and iOuter <= selfA._parameters["MaximumNumberOfSteps"]:
1893         #
1894         # Inner Loop
1895         # ----------
1896         Ht = HO["Tangent"].asMatrix(Xr)
1897         Ht = Ht.reshape(Y.size,Xr.size) # ADAO & check shape
1898         #
1899         # Definition of the cost function
1900         # ------------------------------
1901         def CostFunction(dx):
1902             _dX  = numpy.asmatrix(numpy.ravel( dx )).T
1903             if selfA._parameters["StoreInternalVariables"] or \
1904                 selfA._toStore("CurrentState") or \
1905                 selfA._toStore("CurrentOptimum"):
1906                 selfA.StoredVariables["CurrentState"].store( Xb + _dX )
1907             _HdX = Ht * _dX
1908             _HdX = numpy.asmatrix(numpy.ravel( _HdX )).T
1909             _dInnovation = Innovation - _HdX
1910             if selfA._toStore("SimulatedObservationAtCurrentState") or \
1911                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1912                 selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb + _HdX )
1913             if selfA._toStore("InnovationAtCurrentState"):
1914                 selfA.StoredVariables["InnovationAtCurrentState"].store( _dInnovation )
1915             #
1916             Jb  = float( 0.5 * _dX.T * BI * _dX )
1917             Jo  = float( 0.5 * _dInnovation.T * RI * _dInnovation )
1918             J   = Jb + Jo
1919             #
1920             selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
1921             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1922             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1923             selfA.StoredVariables["CostFunctionJ" ].store( J )
1924             if selfA._toStore("IndexOfOptimum") or \
1925                 selfA._toStore("CurrentOptimum") or \
1926                 selfA._toStore("CostFunctionJAtCurrentOptimum") or \
1927                 selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
1928                 selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
1929                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1930                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1931             if selfA._toStore("IndexOfOptimum"):
1932                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1933             if selfA._toStore("CurrentOptimum"):
1934                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
1935             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1936                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
1937             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1938                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1939             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1940                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1941             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1942                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1943             return J
1944         #
1945         def GradientOfCostFunction(dx):
1946             _dX          = numpy.asmatrix(numpy.ravel( dx )).T
1947             _HdX         = Ht * _dX
1948             _HdX         = numpy.asmatrix(numpy.ravel( _HdX )).T
1949             _dInnovation = Innovation - _HdX
1950             GradJb       = BI * _dX
1951             GradJo       = - Ht.T @ (RI * _dInnovation)
1952             GradJ        = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
1953             return GradJ
1954         #
1955         # Minimization of the functional
1956         # --------------------------------
1957         nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
1958         #
1959         if selfA._parameters["Minimizer"] == "LBFGSB":
1960             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
1961             if "0.19" <= scipy.version.version <= "1.1.0":
1962                 import lbfgsbhlt as optimiseur
1963             else:
1964                 import scipy.optimize as optimiseur
1965             Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
1966                 func        = CostFunction,
1967                 x0          = numpy.zeros(Xini.size),
1968                 fprime      = GradientOfCostFunction,
1969                 args        = (),
1970                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
1971                 maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
1972                 factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
1973                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1974                 iprint      = selfA._parameters["optiprint"],
1975                 )
1976             nfeval = Informations['funcalls']
1977             rc     = Informations['warnflag']
1978         elif selfA._parameters["Minimizer"] == "TNC":
1979             Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
1980                 func        = CostFunction,
1981                 x0          = numpy.zeros(Xini.size),
1982                 fprime      = GradientOfCostFunction,
1983                 args        = (),
1984                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
1985                 maxfun      = selfA._parameters["MaximumNumberOfSteps"],
1986                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1987                 ftol        = selfA._parameters["CostDecrementTolerance"],
1988                 messages    = selfA._parameters["optmessages"],
1989                 )
1990         elif selfA._parameters["Minimizer"] == "CG":
1991             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
1992                 f           = CostFunction,
1993                 x0          = numpy.zeros(Xini.size),
1994                 fprime      = GradientOfCostFunction,
1995                 args        = (),
1996                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
1997                 gtol        = selfA._parameters["GradientNormTolerance"],
1998                 disp        = selfA._parameters["optdisp"],
1999                 full_output = True,
2000                 )
2001         elif selfA._parameters["Minimizer"] == "NCG":
2002             Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2003                 f           = CostFunction,
2004                 x0          = numpy.zeros(Xini.size),
2005                 fprime      = GradientOfCostFunction,
2006                 args        = (),
2007                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2008                 avextol     = selfA._parameters["CostDecrementTolerance"],
2009                 disp        = selfA._parameters["optdisp"],
2010                 full_output = True,
2011                 )
2012         elif selfA._parameters["Minimizer"] == "BFGS":
2013             Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2014                 f           = CostFunction,
2015                 x0          = numpy.zeros(Xini.size),
2016                 fprime      = GradientOfCostFunction,
2017                 args        = (),
2018                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2019                 gtol        = selfA._parameters["GradientNormTolerance"],
2020                 disp        = selfA._parameters["optdisp"],
2021                 full_output = True,
2022                 )
2023         else:
2024             raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2025         #
2026         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2027         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2028         #
2029         if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2030             Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2031             Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2032         else:
2033             Minimum = Xb + numpy.asmatrix(numpy.ravel( Minimum )).T
2034         #
2035         Xr     = Minimum
2036         DeltaJ = selfA.StoredVariables["CostFunctionJ" ][-1] - J
2037         iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
2038     #
2039     # Obtaining the analysis
2040     # ----------------------
2041     Xa = Xr
2042     #
2043     selfA.StoredVariables["Analysis"].store( Xa )
2044     #
2045     if selfA._toStore("OMA") or \
2046         selfA._toStore("SigmaObs2") or \
2047         selfA._toStore("SimulationQuantiles") or \
2048         selfA._toStore("SimulatedObservationAtOptimum"):
2049         if selfA._toStore("SimulatedObservationAtCurrentState"):
2050             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2051         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2052             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2053         else:
2054             HXa = Hm( Xa )
2055     #
2056     # Computation of the analysis covariance
2057     # ---------------------------------
2058     if selfA._toStore("APosterioriCovariance") or \
2059         selfA._toStore("SimulationQuantiles") or \
2060         selfA._toStore("JacobianMatrixAtOptimum") or \
2061         selfA._toStore("KalmanGainAtOptimum"):
2062         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2063         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2064     if selfA._toStore("APosterioriCovariance") or \
2065         selfA._toStore("SimulationQuantiles") or \
2066         selfA._toStore("KalmanGainAtOptimum"):
2067         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2068         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2069     if selfA._toStore("APosterioriCovariance") or \
2070         selfA._toStore("SimulationQuantiles"):
2071         HessienneI = []
2072         nb = Xa.size
2073         for i in range(nb):
2074             _ee    = numpy.matrix(numpy.zeros(nb)).T
2075             _ee[i] = 1.
2076             _HtEE  = numpy.dot(HtM,_ee)
2077             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2078             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2079         HessienneI = numpy.matrix( HessienneI )
2080         A = HessienneI.I
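        # HessienneI is built column by column as an approximation of the cost function
        # Hessian B^(-1) + H^T R^(-1) H at the optimum, so its inverse A is the usual
        # Gauss-Newton estimate of the a posteriori covariance; the checks below verify
        # that A is square, has a non-negative diagonal and, in debug mode, is SPD.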
2081         if min(A.shape) != max(A.shape):
2082             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2083         if (numpy.diag(A) < 0).any():
2084             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2085         if logging.getLogger().level < logging.WARNING: # The check is only done in debug mode
2086             try:
2087                 L = numpy.linalg.cholesky( A )
2088             except:
2089                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2090     if selfA._toStore("APosterioriCovariance"):
2091         selfA.StoredVariables["APosterioriCovariance"].store( A )
2092     if selfA._toStore("JacobianMatrixAtOptimum"):
2093         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2094     if selfA._toStore("KalmanGainAtOptimum"):
2095         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2096         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
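        # The two branches above are the algebraically equivalent primal and dual forms of
        # the gain, K = B H^T (R + H B H^T)^(-1) and K = (B^(-1) + H^T R^(-1) H)^(-1) H^T R^(-1),
        # the cheaper one being chosen from the relative sizes of Y and Xb.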
2097         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2098     #
2099     # Additional computations and/or storage
2100     # ---------------------------------------
2101     if selfA._toStore("Innovation") or \
2102         selfA._toStore("SigmaObs2") or \
2103         selfA._toStore("MahalanobisConsistency") or \
2104         selfA._toStore("OMB"):
2105         d  = Y - HXb
2106     if selfA._toStore("Innovation"):
2107         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2108     if selfA._toStore("BMA"):
2109         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2110     if selfA._toStore("OMA"):
2111         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2112     if selfA._toStore("OMB"):
2113         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2114     if selfA._toStore("SigmaObs2"):
2115         TraceR = R.trace(Y.size)
2116         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2117     if selfA._toStore("MahalanobisConsistency"):
2118         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2119     if selfA._toStore("SimulationQuantiles"):
2120         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2121     if selfA._toStore("SimulatedObservationAtBackground"):
2122         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2123     if selfA._toStore("SimulatedObservationAtOptimum"):
2124         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2125     #
2126     return 0
2127
2128 # ==============================================================================
2129 def mlef(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="MLEF13",
2130     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
2131     """
2132     Maximum Likelihood Ensemble Filter
2133     """
2134     if selfA._parameters["EstimationOf"] == "Parameters":
2135         selfA._parameters["StoreInternalVariables"] = True
2136     #
2137     # Operators
2138     H = HO["Direct"].appliedControledFormTo
2139     #
2140     if selfA._parameters["EstimationOf"] == "State":
2141         M = EM["Direct"].appliedControledFormTo
2142     #
2143     if CM is not None and "Tangent" in CM and U is not None:
2144         Cm = CM["Tangent"].asMatrix(Xb)
2145     else:
2146         Cm = None
2147     #
2148     # Observation duration and sizes
2149     if hasattr(Y,"stepnumber"):
2150         duration = Y.stepnumber()
2151         __p = numpy.cumprod(Y.shape())[-1]
2152     else:
2153         duration = 2
2154         __p = numpy.array(Y).size
2155     #
2156     # Precompute the inverses of B and R
2157     if selfA._parameters["StoreInternalVariables"] \
2158         or selfA._toStore("CostFunctionJ") \
2159         or selfA._toStore("CostFunctionJb") \
2160         or selfA._toStore("CostFunctionJo") \
2161         or selfA._toStore("CurrentOptimum") \
2162         or selfA._toStore("APosterioriCovariance"):
2163         BI = B.getI()
2164     RI = R.getI()
2165     #
2166     __n = Xb.size
2167     __m = selfA._parameters["NumberOfMembers"]
2168     #
2169     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2170         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
2171         selfA.StoredVariables["Analysis"].store( Xb )
2172         if selfA._toStore("APosterioriCovariance"):
2173             if hasattr(B,"asfullmatrix"):
2174                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2175             else:
2176                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2177         selfA._setInternalState("seed", numpy.random.get_state())
2178     elif selfA._parameters["nextStep"]:
2179         Xn = selfA._getInternalState("Xn")
2180     #
2181     previousJMinimum = numpy.finfo(float).max
         nbPreviousSteps  = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Needed below for IndexOfOptimum (otherwise undefined); same definition as in the variational solvers
2182     #
2183     for step in range(duration-1):
2184         numpy.random.set_state(selfA._getInternalState("seed"))
2185         if hasattr(Y,"store"):
2186             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2187         else:
2188             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2189         #
2190         if U is not None:
2191             if hasattr(U,"store") and len(U)>1:
2192                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2193             elif hasattr(U,"store") and len(U)==1:
2194                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2195             else:
2196                 Un = numpy.asmatrix(numpy.ravel( U )).T
2197         else:
2198             Un = None
2199         #
2200         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2201             Xn = CovarianceInflation( Xn,
2202                 selfA._parameters["InflationType"],
2203                 selfA._parameters["InflationFactor"],
2204                 )
2205         #
2206         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2207             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2208                 argsAsSerie = True,
2209                 returnSerieAsArrayMatrix = True )
2210             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2211             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
2212                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2213                 Xn_predicted = Xn_predicted + Cm * Un
2214         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2215             # --- > By design, M = Id, Q = 0
2216             Xn_predicted = EMX = Xn
2217         #
2218         #--------------------------
2219         if VariantM == "MLEF13":
2220             Xfm = numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))
2221             EaX = EnsembleOfAnomalies( Xn_predicted, Xfm, 1./math.sqrt(__m-1) )
2222             Ua  = numpy.identity(__m)
2223             __j = 0
2224             Deltaw = 1
2225             if not BnotT:
2226                 Ta  = numpy.identity(__m)
2227             vw  = numpy.zeros(__m)
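                 # Iterative minimization of the nonlinear cost function in the space of the
                 # ensemble weights w (Gauss-Newton type iterations); Ta is the square-root
                 # transform matrix used to generate the analysis perturbations.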
2228             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2229                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2230                 #
2231                 if BnotT:
2232                     E1 = vx1 + _epsilon * EaX
2233                 else:
2234                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2235                 #
2236                 HE2 = H( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2237                     argsAsSerie = True,
2238                     returnSerieAsArrayMatrix = True )
2239                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2240                 #
2241                 if BnotT:
2242                     EaY = (HE2 - vy2) / _epsilon
2243                 else:
2244                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2245                 #
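                     # Gradient and approximate Hessian of the cost function with respect to w;
                     # Deltaw is the corresponding Newton increment.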
2246                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy2 )))
2247                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2248                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2249                 #
2250                 vw = vw + Deltaw
2251                 #
2252                 if not BnotT:
2253                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2254                 #
2255                 __j = __j + 1
2256             #
2257             if BnotT:
2258                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2259             #
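                 # The analysis ensemble is rebuilt around the optimal state vx1, using the
                 # final transform matrix Ta (Ua being the identity here).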
2260             Xn = vx1 + math.sqrt(__m-1) * EaX @ Ta @ Ua
2261         #--------------------------
2262         else:
2263             raise ValueError("VariantM has to be chosen from the list of authorized methods.")
2264         #
2265         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2266             Xn = CovarianceInflation( Xn,
2267                 selfA._parameters["InflationType"],
2268                 selfA._parameters["InflationFactor"],
2269                 )
2270         #
2271         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2272         #--------------------------
2273         selfA._setInternalState("Xn", Xn)
2274         selfA._setInternalState("seed", numpy.random.get_state())
2275         #--------------------------
2276         #
2277         if selfA._parameters["StoreInternalVariables"] \
2278             or selfA._toStore("CostFunctionJ") \
2279             or selfA._toStore("CostFunctionJb") \
2280             or selfA._toStore("CostFunctionJo") \
2281             or selfA._toStore("APosterioriCovariance") \
2282             or selfA._toStore("InnovationAtCurrentAnalysis") \
2283             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2284             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2285             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2286             _Innovation = Ynpu - _HXa
2287         #
2288         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2289         # ---> with analysis
2290         selfA.StoredVariables["Analysis"].store( Xa )
2291         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2292             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2293         if selfA._toStore("InnovationAtCurrentAnalysis"):
2294             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2295         # ---> with current state
2296         if selfA._parameters["StoreInternalVariables"] \
2297             or selfA._toStore("CurrentState"):
2298             selfA.StoredVariables["CurrentState"].store( Xn )
2299         if selfA._toStore("ForecastState"):
2300             selfA.StoredVariables["ForecastState"].store( EMX )
2301         if selfA._toStore("ForecastCovariance"):
2302             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2303         if selfA._toStore("BMA"):
2304             selfA.StoredVariables["BMA"].store( EMX - Xa )
2305         if selfA._toStore("InnovationAtCurrentState"):
2306             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2307         if selfA._toStore("SimulatedObservationAtCurrentState") \
2308             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2309             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2310         # ---> others
2311         if selfA._parameters["StoreInternalVariables"] \
2312             or selfA._toStore("CostFunctionJ") \
2313             or selfA._toStore("CostFunctionJb") \
2314             or selfA._toStore("CostFunctionJo") \
2315             or selfA._toStore("CurrentOptimum") \
2316             or selfA._toStore("APosterioriCovariance"):
2317             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2318             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2319             J   = Jb + Jo
2320             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2321             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2322             selfA.StoredVariables["CostFunctionJ" ].store( J )
2323             #
2324             if selfA._toStore("IndexOfOptimum") \
2325                 or selfA._toStore("CurrentOptimum") \
2326                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2327                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2328                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2329                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2330                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2331             if selfA._toStore("IndexOfOptimum"):
2332                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2333             if selfA._toStore("CurrentOptimum"):
2334                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2335             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2336                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2337             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2338                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2339             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2340                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2341             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2342                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2343         if selfA._toStore("APosterioriCovariance"):
2344             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2345         if selfA._parameters["EstimationOf"] == "Parameters" \
2346             and J < previousJMinimum:
2347             previousJMinimum    = J
2348             XaMin               = Xa
2349             if selfA._toStore("APosterioriCovariance"):
2350                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2351         # ---> For the smoothers
2352         if selfA._toStore("CurrentEnsembleState"):
2353             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2354     #
2355     # Additional final storage of the optimum for parameter estimation
2356     # ----------------------------------------------------------------------
2357     if selfA._parameters["EstimationOf"] == "Parameters":
2358         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2359         selfA.StoredVariables["Analysis"].store( XaMin )
2360         if selfA._toStore("APosterioriCovariance"):
2361             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2362         if selfA._toStore("BMA"):
2363             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2364     #
2365     return 0
2366
2367 # ==============================================================================
2368 def mmqr(
2369         func     = None,
2370         x0       = None,
2371         fprime   = None,
2372         bounds   = None,
2373         quantile = 0.5,
2374         maxfun   = 15000,
2375         toler    = 1.e-06,
2376         y        = None,
2377         ):
2378     """
2379     Implémentation informatique de l'algorithme MMQR, basée sur la publication :
2380     Computer implementation of the MMQR algorithm, based on the publication:
2381     Journal of Computational and Graphical Statistics, 9, 1, pp.60-77, 2000.
2382     """
2383     #
2384     # Retrieval of the initial data and information
2385     # --------------------------------------------------
2386     variables = numpy.ravel( x0 )
2387     mesures   = numpy.ravel( y )
2388     increment = sys.float_info[0]
2389     p         = variables.size
2390     n         = mesures.size
2391     quantile  = float(quantile)
2392     #
2393     # Computation of the MM parameters
2394     # ---------------------------
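         # epsilon is the smoothing parameter of the perturbed quantile ("check") loss
         # majorized by the MM algorithm (cf. the Hunter & Lange reference above).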
2395     tn      = float(toler) / n
2396     e0      = -tn / math.log(tn)
2397     epsilon = (e0-tn)/(1+math.log(e0))
2398     #
2399     # Initialization computations
2400     # ------------------------
2401     residus  = mesures - numpy.ravel( func( variables ) )
2402     poids    = 1./(epsilon+numpy.abs(residus))
2403     veps     = 1. - 2. * quantile - residus * poids
2404     lastsurrogate = -numpy.sum(residus*veps) - (1.-2.*quantile)*numpy.sum(residus)
2405     iteration = 0
2406     #
2407     # Iterative search
2408     # -------------------
2409     while (increment > toler) and (iteration < maxfun) :
2410         iteration += 1
2411         #
2412         Derivees  = numpy.array(fprime(variables))
2413         Derivees  = Derivees.reshape(n,p) # Necessary to restore the matrix shape if it went through YACS pipes
2414         DeriveesT = Derivees.transpose()
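             # Each MM step solves the normal equations of a weighted least-squares problem,
             # i.e. the quadratic surrogate that majorizes the quantile loss.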
2415         M         =   numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) )
2416         SM        =   numpy.transpose(numpy.dot( DeriveesT , veps ))
2417         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
2418         #
2419         variables = variables + step
2420         if bounds is not None:
2421             # Warning: avoid an infinite loop when an interval is too small
2422             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
2423                 step      = step/2.
2424                 variables = variables - step
2425         residus   = mesures - numpy.ravel( func(variables) )
2426         surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2427         #
2428         while ( (surrogate > lastsurrogate) and ( max(list(numpy.abs(step))) > 1.e-16 ) ) :
2429             step      = step/2.
2430             variables = variables - step
2431             residus   = mesures - numpy.ravel( func(variables) )
2432             surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2433         #
2434         increment     = lastsurrogate-surrogate
2435         poids         = 1./(epsilon+numpy.abs(residus))
2436         veps          = 1. - 2. * quantile - residus * poids
2437         lastsurrogate = -numpy.sum(residus * veps) - (1.-2.*quantile)*numpy.sum(residus)
2438     #
2439     # Discrepancy measure
2440     # --------------
2441     Ecart = quantile * numpy.sum(residus) - numpy.sum( residus[residus<0] )
2442     #
2443     return variables, Ecart, [n,p,iteration,increment,0]
2444
2445 # ==============================================================================
2446 def multi3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
2447     """
2448     Multi-step and multi-method 3DVAR
2449     """
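         # At each observation time, the state is propagated by the evolution model (for state
         # estimation) and the provided oneCycle function performs the elementary analysis.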
2450     #
2451     # Initialization
2452     if selfA._parameters["EstimationOf"] == "State":
2453         M = EM["Direct"].appliedTo
2454         #
2455         if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2456             Xn = numpy.ravel(Xb).reshape((-1,1))
2457             selfA.StoredVariables["Analysis"].store( Xn )
2458             if selfA._toStore("APosterioriCovariance"):
2459                 if hasattr(B,"asfullmatrix"):
2460                     selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
2461                 else:
2462                     selfA.StoredVariables["APosterioriCovariance"].store( B )
2463             if selfA._toStore("ForecastState"):
2464                 selfA.StoredVariables["ForecastState"].store( Xn )
2465         elif selfA._parameters["nextStep"]:
2466             Xn = selfA._getInternalState("Xn")
2467     else:
2468         Xn = numpy.ravel(Xb).reshape((-1,1))
2469     #
2470     if hasattr(Y,"stepnumber"):
2471         duration = Y.stepnumber()
2472     else:
2473         duration = 2
2474     #
2475     # Multi-step
2476     for step in range(duration-1):
2477         if hasattr(Y,"store"):
2478             Ynpu = numpy.ravel( Y[step+1] ).reshape((-1,1))
2479         else:
2480             Ynpu = numpy.ravel( Y ).reshape((-1,1))
2481         #
2482         if selfA._parameters["EstimationOf"] == "State": # Forecast
2483             Xn_predicted = M( Xn )
2484             if selfA._toStore("ForecastState"):
2485                 selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2486         elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
2487             # --- > By design, M = Id, Q = 0
2488             Xn_predicted = Xn
2489         Xn_predicted = numpy.ravel(Xn_predicted).reshape((-1,1))
2490         #
2491         oneCycle(selfA, Xn_predicted, Ynpu, U, HO, None, None, R, B, None)
2492         #
2493         Xn = selfA.StoredVariables["Analysis"][-1]
2494         #--------------------------
2495         selfA._setInternalState("Xn", Xn)
2496     #
2497     return 0
2498
2499 # ==============================================================================
2500 def psas3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2501     """
2502     3DVAR PSAS
2503     """
2504     #
2505     # Initializations
2506     # ---------------
2507     #
2508     # Operators
2509     Hm = HO["Direct"].appliedTo
2510     #
2511     # Possible use of a precomputed H(Xb) vector
2512     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2513         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2514     else:
2515         HXb = Hm( Xb )
2516     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2517     if Y.size != HXb.size:
2518         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different; they have to be identical."%(Y.size,HXb.size))
2519     if max(Y.shape) != max(HXb.shape):
2520         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different; they have to be identical."%(Y.shape,HXb.shape))
2521     #
2522     if selfA._toStore("JacobianMatrixAtBackground"):
2523         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2524         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2525         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2526     #
2527     Ht = HO["Tangent"].asMatrix(Xb)
2528     BHT = B * Ht.T
2529     HBHTpR = R + Ht * BHT
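         # PSAS dual formulation: the control variable w lives in the observation space, with
         # J(w) = 0.5*w^T*(H*B*H^T + R)*w - w^T*(Y - HXb), and the analysis is recovered as
         # Xa = Xb + BHT * w.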
2530     Innovation = Y - HXb
2531     #
2532     # Starting point of the optimization (the dual variable lives in the observation space)
2533     Xini = numpy.zeros(Y.size)
2534     #
2535     # Definition of the cost function
2536     # ------------------------------
2537     def CostFunction(w):
2538         _W = numpy.asmatrix(numpy.ravel( w )).T
2539         if selfA._parameters["StoreInternalVariables"] or \
2540             selfA._toStore("CurrentState") or \
2541             selfA._toStore("CurrentOptimum"):
2542             selfA.StoredVariables["CurrentState"].store( Xb + BHT * _W )
2543         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2544             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2545             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm( Xb + BHT * _W ) )
2546         if selfA._toStore("InnovationAtCurrentState"):
2547             selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
2548         #
2549         Jb  = float( 0.5 * _W.T * HBHTpR * _W )
2550         Jo  = float( - _W.T * Innovation )
2551         J   = Jb + Jo
2552         #
2553         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2554         selfA.StoredVariables["CostFunctionJb"].store( Jb )
2555         selfA.StoredVariables["CostFunctionJo"].store( Jo )
2556         selfA.StoredVariables["CostFunctionJ" ].store( J )
2557         if selfA._toStore("IndexOfOptimum") or \
2558             selfA._toStore("CurrentOptimum") or \
2559             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2560             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2561             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2562             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2563             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2564         if selfA._toStore("IndexOfOptimum"):
2565             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2566         if selfA._toStore("CurrentOptimum"):
2567             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2568         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2569             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2570         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2571             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2572         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2573             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2574         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2575             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2576         return J
2577     #
2578     def GradientOfCostFunction(w):
2579         _W = numpy.asmatrix(numpy.ravel( w )).T
2580         GradJb  = HBHTpR * _W
2581         GradJo  = - Innovation
2582         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2583         return GradJ
2584     #
2585     # Minimization of the functional
2586     # --------------------------------
2587     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2588     #
2589     if selfA._parameters["Minimizer"] == "LBFGSB":
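             # For these intermediate scipy versions, a locally provided variant of the
             # L-BFGS-B driver (lbfgsbhlt) is used instead of the standard scipy.optimize one.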
2590         if "0.19" <= scipy.version.version <= "1.1.0":
2591             import lbfgsbhlt as optimiseur
2592         else:
2593             import scipy.optimize as optimiseur
2594         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2595             func        = CostFunction,
2596             x0          = Xini,
2597             fprime      = GradientOfCostFunction,
2598             args        = (),
2599             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2600             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2601             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2602             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2603             iprint      = selfA._parameters["optiprint"],
2604             )
2605         nfeval = Informations['funcalls']
2606         rc     = Informations['warnflag']
2607     elif selfA._parameters["Minimizer"] == "TNC":
2608         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2609             func        = CostFunction,
2610             x0          = Xini,
2611             fprime      = GradientOfCostFunction,
2612             args        = (),
2613             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2614             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2615             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2616             ftol        = selfA._parameters["CostDecrementTolerance"],
2617             messages    = selfA._parameters["optmessages"],
2618             )
2619     elif selfA._parameters["Minimizer"] == "CG":
2620         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2621             f           = CostFunction,
2622             x0          = Xini,
2623             fprime      = GradientOfCostFunction,
2624             args        = (),
2625             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2626             gtol        = selfA._parameters["GradientNormTolerance"],
2627             disp        = selfA._parameters["optdisp"],
2628             full_output = True,
2629             )
2630     elif selfA._parameters["Minimizer"] == "NCG":
2631         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2632             f           = CostFunction,
2633             x0          = Xini,
2634             fprime      = GradientOfCostFunction,
2635             args        = (),
2636             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2637             avextol     = selfA._parameters["CostDecrementTolerance"],
2638             disp        = selfA._parameters["optdisp"],
2639             full_output = True,
2640             )
2641     elif selfA._parameters["Minimizer"] == "BFGS":
2642         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2643             f           = CostFunction,
2644             x0          = Xini,
2645             fprime      = GradientOfCostFunction,
2646             args        = (),
2647             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2648             gtol        = selfA._parameters["GradientNormTolerance"],
2649             disp        = selfA._parameters["optdisp"],
2650             full_output = True,
2651             )
2652     else:
2653         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2654     #
2655     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2656     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2657     #
2658     # Correction to work around a TNC bug on the returned Minimum
2659     # ----------------------------------------------------------------
2660     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2661         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2662         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2663     else:
2664         Minimum = Xb + BHT * numpy.asmatrix(numpy.ravel( Minimum )).T
2665     #
2666     # Obtaining the analysis
2667     # ----------------------
2668     Xa = Minimum
2669     #
2670     selfA.StoredVariables["Analysis"].store( Xa )
2671     #
2672     if selfA._toStore("OMA") or \
2673         selfA._toStore("SigmaObs2") or \
2674         selfA._toStore("SimulationQuantiles") or \
2675         selfA._toStore("SimulatedObservationAtOptimum"):
2676         if selfA._toStore("SimulatedObservationAtCurrentState"):
2677             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2678         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2679             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2680         else:
2681             HXa = Hm( Xa )
2682     #
2683     # Computation of the analysis covariance
2684     # ---------------------------------
2685     if selfA._toStore("APosterioriCovariance") or \
2686         selfA._toStore("SimulationQuantiles") or \
2687         selfA._toStore("JacobianMatrixAtOptimum") or \
2688         selfA._toStore("KalmanGainAtOptimum"):
2689         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2690         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2691     if selfA._toStore("APosterioriCovariance") or \
2692         selfA._toStore("SimulationQuantiles") or \
2693         selfA._toStore("KalmanGainAtOptimum"):
2694         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2695         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2696     if selfA._toStore("APosterioriCovariance") or \
2697         selfA._toStore("SimulationQuantiles"):
2698         BI = B.getI()
2699         RI = R.getI()
2700         HessienneI = []
2701         nb = Xa.size
2702         for i in range(nb):
2703             _ee    = numpy.matrix(numpy.zeros(nb)).T
2704             _ee[i] = 1.
2705             _HtEE  = numpy.dot(HtM,_ee)
2706             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2707             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2708         HessienneI = numpy.matrix( HessienneI )
2709         A = HessienneI.I
2710         if min(A.shape) != max(A.shape):
2711             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2712         if (numpy.diag(A) < 0).any():
2713             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2714         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
2715             try:
2716                 L = numpy.linalg.cholesky( A )
2717             except numpy.linalg.LinAlgError:
2718                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2719     if selfA._toStore("APosterioriCovariance"):
2720         selfA.StoredVariables["APosterioriCovariance"].store( A )
2721     if selfA._toStore("JacobianMatrixAtOptimum"):
2722         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2723     if selfA._toStore("KalmanGainAtOptimum"):
2724         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2725         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2726         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2727     #
2728     # Additional calculations and/or storage
2729     # ---------------------------------------
2730     if selfA._toStore("Innovation") or \
2731         selfA._toStore("SigmaObs2") or \
2732         selfA._toStore("MahalanobisConsistency") or \
2733         selfA._toStore("OMB"):
2734         d  = Y - HXb
2735     if selfA._toStore("Innovation"):
2736         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2737     if selfA._toStore("BMA"):
2738         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2739     if selfA._toStore("OMA"):
2740         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2741     if selfA._toStore("OMB"):
2742         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2743     if selfA._toStore("SigmaObs2"):
2744         TraceR = R.trace(Y.size)
2745         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2746     if selfA._toStore("MahalanobisConsistency"):
2747         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2748     if selfA._toStore("SimulationQuantiles"):
2749         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2750     if selfA._toStore("SimulatedObservationAtBackground"):
2751         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2752     if selfA._toStore("SimulatedObservationAtOptimum"):
2753         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2754     #
2755     return 0
2756
2757 # ==============================================================================
2758 def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"):
2759     """
2760     Stochastic EnKF
2761     """
2762     if selfA._parameters["EstimationOf"] == "Parameters":
2763         selfA._parameters["StoreInternalVariables"] = True
2764     #
2765     # Operators
2766     H = HO["Direct"].appliedControledFormTo
2767     #
2768     if selfA._parameters["EstimationOf"] == "State":
2769         M = EM["Direct"].appliedControledFormTo
2770     #
2771     if CM is not None and "Tangent" in CM and U is not None:
2772         Cm = CM["Tangent"].asMatrix(Xb)
2773     else:
2774         Cm = None
2775     #
2776     # Observation duration and sizes
2777     if hasattr(Y,"stepnumber"):
2778         duration = Y.stepnumber()
2779         __p = numpy.cumprod(Y.shape())[-1]
2780     else:
2781         duration = 2
2782         __p = numpy.array(Y).size
2783     #
2784     # Precompute the inverses of B and R
2785     if selfA._parameters["StoreInternalVariables"] \
2786         or selfA._toStore("CostFunctionJ") \
2787         or selfA._toStore("CostFunctionJb") \
2788         or selfA._toStore("CostFunctionJo") \
2789         or selfA._toStore("CurrentOptimum") \
2790         or selfA._toStore("APosterioriCovariance"):
2791         BI = B.getI()
2792         RI = R.getI()
2793     #
2794     __n = Xb.size
2795     __m = selfA._parameters["NumberOfMembers"]
2796     #
2797     if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
2798     else:                         Rn = R
2799     #
2800     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2801         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
2802         else:                         Pn = B
2803         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
2804         selfA.StoredVariables["Analysis"].store( Xb )
2805         if selfA._toStore("APosterioriCovariance"):
2806             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
2807         selfA._setInternalState("seed", numpy.random.get_state())
2808     elif selfA._parameters["nextStep"]:
2809         Xn = selfA._getInternalState("Xn")
2810     #
2811     previousJMinimum = numpy.finfo(float).max
         nbPreviousSteps  = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Needed below for IndexOfOptimum (otherwise undefined); same definition as in the variational solvers
2812     #
2813     for step in range(duration-1):
2814         numpy.random.set_state(selfA._getInternalState("seed"))
2815         if hasattr(Y,"store"):
2816             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2817         else:
2818             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2819         #
2820         if U is not None:
2821             if hasattr(U,"store") and len(U)>1:
2822                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2823             elif hasattr(U,"store") and len(U)==1:
2824                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2825             else:
2826                 Un = numpy.asmatrix(numpy.ravel( U )).T
2827         else:
2828             Un = None
2829         #
2830         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2831             Xn = CovarianceInflation( Xn,
2832                 selfA._parameters["InflationType"],
2833                 selfA._parameters["InflationFactor"],
2834                 )
2835         #
2836         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2837             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2838                 argsAsSerie = True,
2839                 returnSerieAsArrayMatrix = True )
2840             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2841             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2842                 argsAsSerie = True,
2843                 returnSerieAsArrayMatrix = True )
2844             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
2845                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2846                 Xn_predicted = Xn_predicted + Cm * Un
2847         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2848             # --- > By design, M = Id, Q = 0
2849             Xn_predicted = EMX = Xn
2850             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2851                 argsAsSerie = True,
2852                 returnSerieAsArrayMatrix = True )
2853         #
2854         # Mean of forecast and observation of forecast
2855         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2856         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2857         #
2858         #--------------------------
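             # "KalmanFilterFormula05": classical stochastic EnKF update, where the Kalman gain
             # is built from the ensemble cross- and observation covariances and each member is
             # updated with an individually perturbed observation.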
2859         if VariantM == "KalmanFilterFormula05":
2860             PfHT, HPfHT = 0., 0.
2861             for i in range(__m):
2862                 Exfi = Xn_predicted[:,i].reshape((__n,1)) - Xfm
2863                 Eyfi = HX_predicted[:,i].reshape((__p,1)) - Hfm
2864                 PfHT  += Exfi * Eyfi.T
2865                 HPfHT += Eyfi * Eyfi.T
2866             PfHT  = (1./(__m-1)) * PfHT
2867             HPfHT = (1./(__m-1)) * HPfHT
2868             Kn     = PfHT * ( R + HPfHT ).I
2869             del PfHT, HPfHT
2870             #
2871             for i in range(__m):
2872                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
2873                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
2874         #--------------------------
2875         elif VariantM == "KalmanFilterFormula16":
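                 # "KalmanFilterFormula16": the ensemble of perturbed observations EpY is drawn
                 # first, and the gain is built from the state and observation anomalies.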
2876             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
2877             EpYm  = EpY.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2878             #
2879             EaX   = EnsembleOfAnomalies( Xn_predicted ) / math.sqrt(__m-1)
2880             EaY = (HX_predicted - Hfm - EpY + EpYm) / math.sqrt(__m-1)
2881             #
2882             Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
2883             #
2884             for i in range(__m):
2885                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
2886         #--------------------------
2887         else:
2888             raise ValueError("VariantM has to be chosen from the list of authorized methods.")
2889         #
2890         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2891             Xn = CovarianceInflation( Xn,
2892                 selfA._parameters["InflationType"],
2893                 selfA._parameters["InflationFactor"],
2894                 )
2895         #
2896         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2897         #--------------------------
2898         selfA._setInternalState("Xn", Xn)
2899         selfA._setInternalState("seed", numpy.random.get_state())
2900         #--------------------------
2901         #
2902         if selfA._parameters["StoreInternalVariables"] \
2903             or selfA._toStore("CostFunctionJ") \
2904             or selfA._toStore("CostFunctionJb") \
2905             or selfA._toStore("CostFunctionJo") \
2906             or selfA._toStore("APosterioriCovariance") \
2907             or selfA._toStore("InnovationAtCurrentAnalysis") \
2908             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2909             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2910             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2911             _Innovation = Ynpu - _HXa
2912         #
2913         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2914         # ---> with analysis
2915         selfA.StoredVariables["Analysis"].store( Xa )
2916         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2917             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2918         if selfA._toStore("InnovationAtCurrentAnalysis"):
2919             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2920         # ---> with current state
2921         if selfA._parameters["StoreInternalVariables"] \
2922             or selfA._toStore("CurrentState"):
2923             selfA.StoredVariables["CurrentState"].store( Xn )
2924         if selfA._toStore("ForecastState"):
2925             selfA.StoredVariables["ForecastState"].store( EMX )
2926         if selfA._toStore("ForecastCovariance"):
2927             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2928         if selfA._toStore("BMA"):
2929             selfA.StoredVariables["BMA"].store( EMX - Xa )
2930         if selfA._toStore("InnovationAtCurrentState"):
2931             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
2932         if selfA._toStore("SimulatedObservationAtCurrentState") \
2933             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2934             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
2935         # ---> others
2936         if selfA._parameters["StoreInternalVariables"] \
2937             or selfA._toStore("CostFunctionJ") \
2938             or selfA._toStore("CostFunctionJb") \
2939             or selfA._toStore("CostFunctionJo") \
2940             or selfA._toStore("CurrentOptimum") \
2941             or selfA._toStore("APosterioriCovariance"):
2942             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2943             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2944             J   = Jb + Jo
2945             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2946             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2947             selfA.StoredVariables["CostFunctionJ" ].store( J )
2948             #
2949             if selfA._toStore("IndexOfOptimum") \
2950                 or selfA._toStore("CurrentOptimum") \
2951                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2952                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2953                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2954                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2955                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2956             if selfA._toStore("IndexOfOptimum"):
2957                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2958             if selfA._toStore("CurrentOptimum"):
2959                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2960             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2961                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2962             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2963                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2964             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2965                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2966             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2967                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2968         if selfA._toStore("APosterioriCovariance"):
2969             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2970         if selfA._parameters["EstimationOf"] == "Parameters" \
2971             and J < previousJMinimum:
2972             previousJMinimum    = J
2973             XaMin               = Xa
2974             if selfA._toStore("APosterioriCovariance"):
2975                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2976         # ---> For the smoothers
2977         if selfA._toStore("CurrentEnsembleState"):
2978             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2979     #
2980     # Additional final storage of the optimum for parameter estimation
2981     # ----------------------------------------------------------------------
2982     if selfA._parameters["EstimationOf"] == "Parameters":
2983         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2984         selfA.StoredVariables["Analysis"].store( XaMin )
2985         if selfA._toStore("APosterioriCovariance"):
2986             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2987         if selfA._toStore("BMA"):
2988             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2989     #
2990     return 0
2991
2992 # ==============================================================================
2993 def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2994     """
2995     3DVAR
2996     """
2997     #
2998     # Initializations
2999     # ---------------
3000     #
3001     # Operators
3002     Hm = HO["Direct"].appliedTo
3003     Ha = HO["Adjoint"].appliedInXTo
3004     #
3005     # Possible use of a precomputed H(Xb) vector
3006     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
3007         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
3008     else:
3009         HXb = Hm( Xb )
3010     HXb = HXb.reshape((-1,1))
3011     if Y.size != HXb.size:
3012         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different; they have to be identical."%(Y.size,HXb.size))
3013     if max(Y.shape) != max(HXb.shape):
3014         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different; they have to be identical."%(Y.shape,HXb.shape))
3015     #
3016     if selfA._toStore("JacobianMatrixAtBackground"):
3017         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
3018         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
3019         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
3020     #
3021     # Precompute the inverses of B and R
3022     BI = B.getI()
3023     RI = R.getI()
3024     #
3025     # Starting point of the optimization
3026     Xini = selfA._parameters["InitializationPoint"]
3027     #
3028     # Definition of the cost function
3029     # ------------------------------
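         # Standard 3DVAR cost function J(x) = 0.5*(x-Xb)^T*BI*(x-Xb) + 0.5*(Y-Hm(x))^T*RI*(Y-Hm(x));
         # its gradient is evaluated with the adjoint operator Ha.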
3030     def CostFunction(x):
3031         _X  = numpy.ravel( x ).reshape((-1,1))
3032         if selfA._parameters["StoreInternalVariables"] or \
3033             selfA._toStore("CurrentState") or \
3034             selfA._toStore("CurrentOptimum"):
3035             selfA.StoredVariables["CurrentState"].store( _X )
3036         _HX = Hm( _X ).reshape((-1,1))
3037         _Innovation = Y - _HX
3038         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3039             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3040             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3041         if selfA._toStore("InnovationAtCurrentState"):
3042             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3043         #
3044         Jb  = float( 0.5 * (_X - Xb).T * (BI * (_X - Xb)) )
3045         Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
3046         J   = Jb + Jo
3047         #
3048         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3049         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3050         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3051         selfA.StoredVariables["CostFunctionJ" ].store( J )
3052         if selfA._toStore("IndexOfOptimum") or \
3053             selfA._toStore("CurrentOptimum") or \
3054             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3055             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3056             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3057             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3058             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3059         if selfA._toStore("IndexOfOptimum"):
3060             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3061         if selfA._toStore("CurrentOptimum"):
3062             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3063         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3064             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3065         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3066             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3067         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3068             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3069         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3070             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3071         return J
3072     #
3073     def GradientOfCostFunction(x):
3074         _X      = numpy.ravel( x ).reshape((-1,1))
3075         _HX     = Hm( _X )
3076         _HX     = numpy.ravel( _HX ).reshape((-1,1))
3077         GradJb  = BI * (_X - Xb)
3078         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3079         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3080         return GradJ
3081     #
3082     # Minimization of the functional
3083     # --------------------------------
3084     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3085     #
3086     if selfA._parameters["Minimizer"] == "LBFGSB":
3087         if "0.19" <= scipy.version.version <= "1.1.0":
3088             import lbfgsbhlt as optimiseur
3089         else:
3090             import scipy.optimize as optimiseur
3091         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3092             func        = CostFunction,
3093             x0          = Xini,
3094             fprime      = GradientOfCostFunction,
3095             args        = (),
3096             bounds      = selfA._parameters["Bounds"],
3097             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3098             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3099             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3100             iprint      = selfA._parameters["optiprint"],
3101             )
3102         nfeval = Informations['funcalls']
3103         rc     = Informations['warnflag']
3104     elif selfA._parameters["Minimizer"] == "TNC":
3105         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3106             func        = CostFunction,
3107             x0          = Xini,
3108             fprime      = GradientOfCostFunction,
3109             args        = (),
3110             bounds      = selfA._parameters["Bounds"],
3111             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3112             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3113             ftol        = selfA._parameters["CostDecrementTolerance"],
3114             messages    = selfA._parameters["optmessages"],
3115             )
3116     elif selfA._parameters["Minimizer"] == "CG":
3117         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3118             f           = CostFunction,
3119             x0          = Xini,
3120             fprime      = GradientOfCostFunction,
3121             args        = (),
3122             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3123             gtol        = selfA._parameters["GradientNormTolerance"],
3124             disp        = selfA._parameters["optdisp"],
3125             full_output = True,
3126             )
3127     elif selfA._parameters["Minimizer"] == "NCG":
3128         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3129             f           = CostFunction,
3130             x0          = Xini,
3131             fprime      = GradientOfCostFunction,
3132             args        = (),
3133             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3134             avextol     = selfA._parameters["CostDecrementTolerance"],
3135             disp        = selfA._parameters["optdisp"],
3136             full_output = True,
3137             )
3138     elif selfA._parameters["Minimizer"] == "BFGS":
3139         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3140             f           = CostFunction,
3141             x0          = Xini,
3142             fprime      = GradientOfCostFunction,
3143             args        = (),
3144             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3145             gtol        = selfA._parameters["GradientNormTolerance"],
3146             disp        = selfA._parameters["optdisp"],
3147             full_output = True,
3148             )
3149     else:
3150         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3151     #
3152     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3153     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3154     #
3155     # Correction to work around a TNC bug on the returned Minimum
3156     # ----------------------------------------------------------------
3157     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3158         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3159     #
3160     Xa = Minimum
3161     #--------------------------
3162     #
3163     selfA.StoredVariables["Analysis"].store( Xa )
3164     #
3165     if selfA._toStore("OMA") or \
3166         selfA._toStore("SigmaObs2") or \
3167         selfA._toStore("SimulationQuantiles") or \
3168         selfA._toStore("SimulatedObservationAtOptimum"):
3169         if selfA._toStore("SimulatedObservationAtCurrentState"):
3170             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3171         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3172             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3173         else:
3174             HXa = Hm( Xa )
3175     #
3176     if selfA._toStore("APosterioriCovariance") or \
3177         selfA._toStore("SimulationQuantiles") or \
3178         selfA._toStore("JacobianMatrixAtOptimum") or \
3179         selfA._toStore("KalmanGainAtOptimum"):
3180         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3181         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3182     if selfA._toStore("APosterioriCovariance") or \
3183         selfA._toStore("SimulationQuantiles") or \
3184         selfA._toStore("KalmanGainAtOptimum"):
3185         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3186         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
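    # The a posteriori covariance A is obtained as the inverse of the Hessian
    # of the cost function, assembled column by column below:
    #     A = ( B^{-1} + H^T R^{-1} H )^{-1}
    # with H the tangent observation operator at the optimum (HtM) and H^T its
    # adjoint (HaM).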
3187     if selfA._toStore("APosterioriCovariance") or \
3188         selfA._toStore("SimulationQuantiles"):
3189         HessienneI = []
3190         nb = Xa.size
3191         for i in range(nb):
3192             _ee    = numpy.zeros(nb)
3193             _ee[i] = 1.
3194             _HtEE  = numpy.dot(HtM,_ee)
3195             HessienneI.append( numpy.ravel( BI * _ee.reshape((-1,1)) + HaM * (RI * _HtEE.reshape((-1,1))) ) )
3196         A = numpy.linalg.inv(numpy.array( HessienneI ))
3197         if min(A.shape) != max(A.shape):
3198             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
3199         if (numpy.diag(A) < 0).any():
3200             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
3201         if logging.getLogger().level < logging.WARNING: # This check is only performed in debug mode
3202             try:
3203                 L = numpy.linalg.cholesky( A )
3204             except numpy.linalg.LinAlgError:
3205                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
3206     if selfA._toStore("APosterioriCovariance"):
3207         selfA.StoredVariables["APosterioriCovariance"].store( A )
3208     if selfA._toStore("JacobianMatrixAtOptimum"):
3209         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3210     if selfA._toStore("KalmanGainAtOptimum"):
3211         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3212         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3213         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3214     #
3215     if selfA._toStore("Innovation") or \
3216         selfA._toStore("SigmaObs2") or \
3217         selfA._toStore("MahalanobisConsistency") or \
3218         selfA._toStore("OMB"):
3219         d  = Y - HXb
3220     if selfA._toStore("Innovation"):
3221         selfA.StoredVariables["Innovation"].store( d )
3222     if selfA._toStore("BMA"):
3223         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3224     if selfA._toStore("OMA"):
3225         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3226     if selfA._toStore("OMB"):
3227         selfA.StoredVariables["OMB"].store( d )
3228     if selfA._toStore("SigmaObs2"):
3229         TraceR = R.trace(Y.size)
3230         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
3231     if selfA._toStore("MahalanobisConsistency"):
3232         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3233     if selfA._toStore("SimulationQuantiles"):
3234         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3235     if selfA._toStore("SimulatedObservationAtBackground"):
3236         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3237     if selfA._toStore("SimulatedObservationAtOptimum"):
3238         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3239     #
3240     return 0
3241
3242 # ==============================================================================
3243 def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3244     """
3245     4DVAR (four-dimensional variational assimilation)
3246     """
3247     #
3248     # Initializations
3249     # ---------------
3250     #
3251     # Operators
3252     Hm = HO["Direct"].appliedControledFormTo
3253     Mm = EM["Direct"].appliedControledFormTo
3254     #
3255     if CM is not None and "Tangent" in CM and U is not None:
3256         Cm = CM["Tangent"].asMatrix(Xb)
3257     else:
3258         Cm = None
3259     #
3260     def Un(_step):
3261         if U is not None:
3262             if hasattr(U,"store") and 1<=_step<len(U) :
3263                 _Un = numpy.asmatrix(numpy.ravel( U[_step] )).T
3264             elif hasattr(U,"store") and len(U)==1:
3265                 _Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3266             else:
3267                 _Un = numpy.asmatrix(numpy.ravel( U )).T
3268         else:
3269             _Un = None
3270         return _Un
3271     def CmUn(_xn,_un):
3272         if Cm is not None and _un is not None: # Warning: if Cm is also included in M, it is counted twice!
3273             _Cm   = Cm.reshape(_xn.size,_un.size) # ADAO & check shape
3274             _CmUn = _Cm * _un
3275         else:
3276             _CmUn = 0.
3277         return _CmUn
3278     #
3279     # Note: the observations are used starting from time step number 1,
3280     # and are taken from Y as indexed by these step numbers. Step 0 is
3281     # therefore not used, since the first stage starts with the
3282     # observation of step 1.
3283     #
3284     # Number of steps equal to the number of observation steps
3285     if hasattr(Y,"stepnumber"):
3286         duration = Y.stepnumber()
3287     else:
3288         duration = 2
3289     #
3290     # Precompute the inverses of B and R
3291     BI = B.getI()
3292     RI = R.getI()
3293     #
3294     # Starting point of the optimization
3295     Xini = selfA._parameters["InitializationPoint"]
3296     #
3297     # Definition of the cost function
3298     # -------------------------------
3299     selfA.DirectCalculation = [None,] # Step 0 is not observed
3300     selfA.DirectInnovation  = [None,] # Step 0 is not observed
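    # The cost function below is the strong-constraint 4DVAR functional of the
    # initial state x0 over the assimilation window:
    #     J(x0) = 1/2 (x0-xb)^T B^{-1} (x0-xb)
    #           + 1/2 sum_k [y_k - H(M_{0->k}(x0))]^T R^{-1} [y_k - H(M_{0->k}(x0))]
    # The states and innovations of the forward sweep are stored so that the
    # gradient can then be obtained by a single backward adjoint sweep.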
3301     def CostFunction(x):
3302         _X  = numpy.asmatrix(numpy.ravel( x )).T
3303         if selfA._parameters["StoreInternalVariables"] or \
3304             selfA._toStore("CurrentState") or \
3305             selfA._toStore("CurrentOptimum"):
3306             selfA.StoredVariables["CurrentState"].store( _X )
3307         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3308         selfA.DirectCalculation = [None,]
3309         selfA.DirectInnovation  = [None,]
3310         Jo  = 0.
3311         _Xn = _X
3312         for step in range(0,duration-1):
3313             if hasattr(Y,"store"):
3314                 _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
3315             else:
3316                 _Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
3317             _Un = Un(step)
3318             #
3319             # Evolution step
3320             if selfA._parameters["EstimationOf"] == "State":
3321                 _Xn = Mm( (_Xn, _Un) ) + CmUn(_Xn, _Un)
3322             elif selfA._parameters["EstimationOf"] == "Parameters":
3323                 pass
3324             #
3325             if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3326                 __Bounds = ForceNumericBounds( selfA._parameters["Bounds"] )
3327                 _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(__Bounds)[:,0])),axis=1)
3328                 _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(__Bounds)[:,1])),axis=1)
3329             #
3330             # Difference with the observations
3331             if selfA._parameters["EstimationOf"] == "State":
3332                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
3333             elif selfA._parameters["EstimationOf"] == "Parameters":
3334                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
3335             #
3336             # Storage of the state
3337             selfA.DirectCalculation.append( _Xn )
3338             selfA.DirectInnovation.append( _YmHMX )
3339             #
3340             # Accumulation of the observation term of the cost function
3341             Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
3342         J = Jb + Jo
3343         #
3344         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3345         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3346         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3347         selfA.StoredVariables["CostFunctionJ" ].store( J )
3348         if selfA._toStore("IndexOfOptimum") or \
3349             selfA._toStore("CurrentOptimum") or \
3350             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3351             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3352             selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3353             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3354         if selfA._toStore("IndexOfOptimum"):
3355             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3356         if selfA._toStore("CurrentOptimum"):
3357             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3358         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3359             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3360         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3361             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3362         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3363             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3364         return J
3365     #
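    # Gradient of the 4DVAR functional by the adjoint method: the stored
    # innovations are propagated backward in time with the adjoint operators
    # Ha and Ma, so that the accumulated quantity is
    #     GradJo = sum_k M_{0->k}^T H^T R^{-1} [y_k - H(M_{0->k}(x0))]
    # and the full gradient is GradJ = B^{-1} (x0-xb) - GradJo.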
3366     def GradientOfCostFunction(x):
3367         _X      = numpy.asmatrix(numpy.ravel( x )).T
3368         GradJb  = BI * (_X - Xb)
3369         GradJo  = 0.
3370         for step in range(duration-1,0,-1):
3371             # Retrieve the last stored state of the forward evolution
3372             _Xn = selfA.DirectCalculation.pop()
3373             # Retrieve the last stored innovation
3374             _YmHMX = selfA.DirectInnovation.pop()
3375             # Compute the adjoint operators
3376             Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3377             Ha = Ha.reshape(_Xn.size,_YmHMX.size) # ADAO & check shape
3378             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3379             Ma = Ma.reshape(_Xn.size,_Xn.size) # ADAO & check shape
3380             # Gradient computation by the adjoint state
3381             GradJo = GradJo + Ha * RI * _YmHMX # For a linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
3382             GradJo = Ma * GradJo               # For a linear Ma, equivalent to: Ma( (_Xn, GradJo) )
3383         GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
3384         return GradJ
3385     #
3386     # Minimization of the cost function
3387     # ---------------------------------
3388     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3389     #
3390     if selfA._parameters["Minimizer"] == "LBFGSB":
3391         if "0.19" <= scipy.version.version <= "1.1.0":
3392             import lbfgsbhlt as optimiseur
3393         else:
3394             import scipy.optimize as optimiseur
3395         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3396             func        = CostFunction,
3397             x0          = Xini,
3398             fprime      = GradientOfCostFunction,
3399             args        = (),
3400             bounds      = selfA._parameters["Bounds"],
3401             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3402             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3403             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3404             iprint      = selfA._parameters["optiprint"],
3405             )
3406         nfeval = Informations['funcalls']
3407         rc     = Informations['warnflag']
3408     elif selfA._parameters["Minimizer"] == "TNC":
3409         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3410             func        = CostFunction,
3411             x0          = Xini,
3412             fprime      = GradientOfCostFunction,
3413             args        = (),
3414             bounds      = selfA._parameters["Bounds"],
3415             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3416             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3417             ftol        = selfA._parameters["CostDecrementTolerance"],
3418             messages    = selfA._parameters["optmessages"],
3419             )
3420     elif selfA._parameters["Minimizer"] == "CG":
3421         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3422             f           = CostFunction,
3423             x0          = Xini,
3424             fprime      = GradientOfCostFunction,
3425             args        = (),
3426             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3427             gtol        = selfA._parameters["GradientNormTolerance"],
3428             disp        = selfA._parameters["optdisp"],
3429             full_output = True,
3430             )
3431     elif selfA._parameters["Minimizer"] == "NCG":
3432         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3433             f           = CostFunction,
3434             x0          = Xini,
3435             fprime      = GradientOfCostFunction,
3436             args        = (),
3437             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3438             avextol     = selfA._parameters["CostDecrementTolerance"],
3439             disp        = selfA._parameters["optdisp"],
3440             full_output = True,
3441             )
3442     elif selfA._parameters["Minimizer"] == "BFGS":
3443         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3444             f           = CostFunction,
3445             x0          = Xini,
3446             fprime      = GradientOfCostFunction,
3447             args        = (),
3448             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3449             gtol        = selfA._parameters["GradientNormTolerance"],
3450             disp        = selfA._parameters["optdisp"],
3451             full_output = True,
3452             )
3453     else:
3454         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3455     #
3456     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3457     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3458     #
3459     # Workaround for a TNC bug on the returned Minimum
3460     # -------------------------------------------------
3461     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3462         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3463     #
3464     # Retrieval of the analysis
3465     # -------------------------
3466     Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
3467     #
3468     selfA.StoredVariables["Analysis"].store( Xa )
3469     #
3470     # Additional calculations and/or storage
3471     # ---------------------------------------
3472     if selfA._toStore("BMA"):
3473         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3474     #
3475     return 0
3476
3477 # ==============================================================================
3478 def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3479     """
3480     Standard Kalman Filter
3481     """
3482     if selfA._parameters["EstimationOf"] == "Parameters":
3483         selfA._parameters["StoreInternalVariables"] = True
3484     #
3485     # Operators
3486     # ---------
3487     Ht = HO["Tangent"].asMatrix(Xb)
3488     Ha = HO["Adjoint"].asMatrix(Xb)
3489     #
3490     if selfA._parameters["EstimationOf"] == "State":
3491         Mt = EM["Tangent"].asMatrix(Xb)
3492         Ma = EM["Adjoint"].asMatrix(Xb)
3493     #
3494     if CM is not None and "Tangent" in CM and U is not None:
3495         Cm = CM["Tangent"].asMatrix(Xb)
3496     else:
3497         Cm = None
3498     #
3499     # Observation duration and sizes
3500     if hasattr(Y,"stepnumber"):
3501         duration = Y.stepnumber()
3502         __p = numpy.cumprod(Y.shape())[-1]
3503     else:
3504         duration = 2
3505         __p = numpy.array(Y).size
3506     #
3507     # Precompute the inverses of B and R
3508     if selfA._parameters["StoreInternalVariables"] \
3509         or selfA._toStore("CostFunctionJ") \
3510         or selfA._toStore("CostFunctionJb") \
3511         or selfA._toStore("CostFunctionJo") \
3512         or selfA._toStore("CurrentOptimum") \
3513         or selfA._toStore("APosterioriCovariance"):
3514         BI = B.getI()
3515         RI = R.getI()
3516     #
3517     __n = Xb.size
3518     #
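    # Initialization of the recursion: on a first call, or outside of the
    # "nextStep" sequential mode, start from the background (Xb, B); otherwise
    # resume from the internally stored state and covariance (Xn, Pn).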
3519     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3520         Xn = Xb
3521         Pn = B
3522         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3523         selfA.StoredVariables["Analysis"].store( Xb )
3524         if selfA._toStore("APosterioriCovariance"):
3525             if hasattr(B,"asfullmatrix"):
3526                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
3527             else:
3528                 selfA.StoredVariables["APosterioriCovariance"].store( B )
3529         selfA._setInternalState("seed", numpy.random.get_state())
3530     elif selfA._parameters["nextStep"]:
3531         Xn = selfA._getInternalState("Xn")
3532         Pn = selfA._getInternalState("Pn")
3533     #
3534     if selfA._parameters["EstimationOf"] == "Parameters":
3535         XaMin            = Xn
3536         previousJMinimum = numpy.finfo(float).max
3537     #
3538     for step in range(duration-1):
3539         if hasattr(Y,"store"):
3540             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3541         else:
3542             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3543         #
3544         if U is not None:
3545             if hasattr(U,"store") and len(U)>1:
3546                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3547             elif hasattr(U,"store") and len(U)==1:
3548                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3549             else:
3550                 Un = numpy.asmatrix(numpy.ravel( U )).T
3551         else:
3552             Un = None
3553         #
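        # Forecast step of the Kalman filter, with Mt the tangent evolution
        # operator and Ma its adjoint (transpose):
        #     xf = M xn (+ Cm un),   Pf = M Pn M^T + Q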
3554         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3555             Xn_predicted = Mt * Xn
3556             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3557                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3558                 Xn_predicted = Xn_predicted + Cm * Un
3559             Pn_predicted = Q + Mt * Pn * Ma
3560         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3561             # ---> By principle, M = Id, Q = 0
3562             Xn_predicted = Xn
3563             Pn_predicted = Pn
3564         #
3565         if selfA._parameters["EstimationOf"] == "State":
3566             HX_predicted = Ht * Xn_predicted
3567             _Innovation  = Ynpu - HX_predicted
3568         elif selfA._parameters["EstimationOf"] == "Parameters":
3569             HX_predicted = Ht * Xn_predicted
3570             _Innovation  = Ynpu - HX_predicted
3571             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3572                 _Innovation = _Innovation - Cm * Un
3573         #
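        # Analysis step of the Kalman filter, with Ht the tangent observation
        # operator and Ha its adjoint (transpose):
        #     K  = Pf Ht^T (R + Ht Pf Ht^T)^{-1}
        #     xa = xf + K (y - Ht xf),   Pa = Pf - K Ht Pf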
3574         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
3575         Xn = Xn_predicted + Kn * _Innovation
3576         Pn = Pn_predicted - Kn * Ht * Pn_predicted
3577         #
3578         Xa = Xn # Pointer assignment, no copy
3579         #--------------------------
3580         selfA._setInternalState("Xn", Xn)
3581         selfA._setInternalState("Pn", Pn)
3582         #--------------------------
3583         #
3584         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3585         # ---> with the analysis
3586         selfA.StoredVariables["Analysis"].store( Xa )
3587         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3588             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
3589         if selfA._toStore("InnovationAtCurrentAnalysis"):
3590             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3591         # ---> with the current state
3592         if selfA._parameters["StoreInternalVariables"] \
3593             or selfA._toStore("CurrentState"):
3594             selfA.StoredVariables["CurrentState"].store( Xn )
3595         if selfA._toStore("ForecastState"):
3596             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
3597         if selfA._toStore("ForecastCovariance"):
3598             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
3599         if selfA._toStore("BMA"):
3600             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
3601         if selfA._toStore("InnovationAtCurrentState"):
3602             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3603         if selfA._toStore("SimulatedObservationAtCurrentState") \
3604             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3605             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3606         # ---> others
3607         if selfA._parameters["StoreInternalVariables"] \
3608             or selfA._toStore("CostFunctionJ") \
3609             or selfA._toStore("CostFunctionJb") \
3610             or selfA._toStore("CostFunctionJo") \
3611             or selfA._toStore("CurrentOptimum") \
3612             or selfA._toStore("APosterioriCovariance"):
3613             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3614             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3615             J   = Jb + Jo
3616             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3617             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3618             selfA.StoredVariables["CostFunctionJ" ].store( J )
3619             #
3620             if selfA._toStore("IndexOfOptimum") \
3621                 or selfA._toStore("CurrentOptimum") \
3622                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3623                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3624                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3625                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3626                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3627             if selfA._toStore("IndexOfOptimum"):
3628                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3629             if selfA._toStore("CurrentOptimum"):
3630                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3631             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3632                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3633             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3634                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3635             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3636                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3637             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3638                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3639         if selfA._toStore("APosterioriCovariance"):
3640             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3641         if selfA._parameters["EstimationOf"] == "Parameters" \
3642             and J < previousJMinimum:
3643             previousJMinimum    = J
3644             XaMin               = Xa
3645             if selfA._toStore("APosterioriCovariance"):
3646                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3647     #
3648     # Additional final storage of the optimum for parameter estimation
3649     # -----------------------------------------------------------------
3650     if selfA._parameters["EstimationOf"] == "Parameters":
3651         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3652         selfA.StoredVariables["Analysis"].store( XaMin )
3653         if selfA._toStore("APosterioriCovariance"):
3654             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3655         if selfA._toStore("BMA"):
3656             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3657     #
3658     return 0
3659
3660 # ==============================================================================
3661 def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3662     """
3663     Constrained Unscented Kalman Filter
3664     """
3665     if selfA._parameters["EstimationOf"] == "Parameters":
3666         selfA._parameters["StoreInternalVariables"] = True
3667     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
3668     #
3669     L     = Xb.size
3670     Alpha = selfA._parameters["Alpha"]
3671     Beta  = selfA._parameters["Beta"]
3672     if selfA._parameters["Kappa"] == 0:
3673         if selfA._parameters["EstimationOf"] == "State":
3674             Kappa = 0
3675         elif selfA._parameters["EstimationOf"] == "Parameters":
3676             Kappa = 3 - L
3677     else:
3678         Kappa = selfA._parameters["Kappa"]
3679     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3680     Gamma  = math.sqrt( L + Lambda )
3681     #
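    # Weights of the scaled unscented transform associated with the 2L+1
    # sigma points:
    #     Wm_0 = lambda/(L+lambda)
    #     Wc_0 = lambda/(L+lambda) + (1 - alpha^2 + beta)
    #     Wm_i = Wc_i = 1/(2(L+lambda))   for i = 1..2L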
3682     Ww = []
3683     Ww.append( 0. )
3684     for i in range(2*L):
3685         Ww.append( 1. / (2.*(L + Lambda)) )
3686     #
3687     Wm = numpy.array( Ww )
3688     Wm[0] = Lambda / (L + Lambda)
3689     Wc = numpy.array( Ww )
3690     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
3691     #
3692     # Operators
3693     Hm = HO["Direct"].appliedControledFormTo
3694     #
3695     if selfA._parameters["EstimationOf"] == "State":
3696         Mm = EM["Direct"].appliedControledFormTo
3697     #
3698     if CM is not None and "Tangent" in CM and U is not None:
3699         Cm = CM["Tangent"].asMatrix(Xb)
3700     else:
3701         Cm = None
3702     #
3703     # Observation duration and sizes
3704     if hasattr(Y,"stepnumber"):
3705         duration = Y.stepnumber()
3706         __p = numpy.cumprod(Y.shape())[-1]
3707     else:
3708         duration = 2
3709         __p = numpy.array(Y).size
3710     #
3711     # Precompute the inverses of B and R
3712     if selfA._parameters["StoreInternalVariables"] \
3713         or selfA._toStore("CostFunctionJ") \
3714         or selfA._toStore("CostFunctionJb") \
3715         or selfA._toStore("CostFunctionJo") \
3716         or selfA._toStore("CurrentOptimum") \
3717         or selfA._toStore("APosterioriCovariance"):
3718         BI = B.getI()
3719         RI = R.getI()
3720     #
3721     __n = Xb.size
3722     #
3723     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3724         Xn = Xb
3725         if hasattr(B,"asfullmatrix"):
3726             Pn = B.asfullmatrix(__n)
3727         else:
3728             Pn = B
3729         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3730         selfA.StoredVariables["Analysis"].store( Xb )
3731         if selfA._toStore("APosterioriCovariance"):
3732             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3733     elif selfA._parameters["nextStep"]:
3734         Xn = selfA._getInternalState("Xn")
3735         Pn = selfA._getInternalState("Pn")
3736     #
3737     if selfA._parameters["EstimationOf"] == "Parameters":
3738         XaMin            = Xn
3739         previousJMinimum = numpy.finfo(float).max
3740     #
3741     for step in range(duration-1):
3742         if hasattr(Y,"store"):
3743             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3744         else:
3745             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3746         #
3747         if U is not None:
3748             if hasattr(U,"store") and len(U)>1:
3749                 Un = numpy.ravel( U[step] ).reshape((-1,1))
3750             elif hasattr(U,"store") and len(U)==1:
3751                 Un = numpy.ravel( U[0] ).reshape((-1,1))
3752             else:
3753                 Un = numpy.ravel( U ).reshape((-1,1))
3754         else:
3755             Un = None
3756         #
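        # Generation of the 2L+1 sigma points around the current state, using
        # the Cholesky factor of Pn:
        #     X_0 = x,   X_i = x +/- gamma * column_i( Pn^{1/2} )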
3757         Pndemi = numpy.linalg.cholesky(Pn)
3758         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
3759         nbSpts = 2*Xn.size+1
3760         #
3761         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3762             for point in range(nbSpts):
3763                 Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3764                 Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3765         #
3766         XEtnnp = []
3767         for point in range(nbSpts):
3768             if selfA._parameters["EstimationOf"] == "State":
3769                 XEtnnpi = numpy.asmatrix(numpy.ravel( Mm( (Xnp[:,point], Un) ) )).T
3770                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3771                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
3772                     XEtnnpi = XEtnnpi + Cm * Un
3773                 if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3774                     XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3775                     XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3776             elif selfA._parameters["EstimationOf"] == "Parameters":
3777                 # ---> By principle, M = Id, Q = 0
3778                 XEtnnpi = Xnp[:,point]
3779             XEtnnp.append( XEtnnpi )
3780         XEtnnp = numpy.hstack( XEtnnp )
3781         #
3782         Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
3783         #
3784         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3785             Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3786             Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3787         #
3788         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
3789         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
3790         for point in range(nbSpts):
3791             Pnm += Wc[point] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
3792         #
3793         if selfA._parameters["EstimationOf"] == "Parameters" and selfA._parameters["Bounds"] is not None:
3794             Pnmdemi = selfA._parameters["Reconditioner"] * numpy.linalg.cholesky(Pnm)
3795         else:
3796             Pnmdemi = numpy.linalg.cholesky(Pnm)
3797         #
3798         Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
3799         #
3800         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3801             for point in range(nbSpts):
3802                 Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3803                 Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3804         #
3805         Ynnp = []
3806         for point in range(nbSpts):
3807             if selfA._parameters["EstimationOf"] == "State":
3808                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], None) ) )).T
3809             elif selfA._parameters["EstimationOf"] == "Parameters":
3810                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], Un) ) )).T
3811             Ynnp.append( Ynnpi )
3812         Ynnp = numpy.hstack( Ynnp )
3813         #
3814         Yncm = numpy.matrix( Ynnp.getA()*numpy.array(Wm) ).sum(axis=1)
3815         #
3816         Pyyn = R
3817         Pxyn = 0.
3818         for point in range(nbSpts):
3819             Pyyn += Wc[point] * (Ynnp[:,point]-Yncm) * (Ynnp[:,point]-Yncm).T
3820             Pxyn += Wc[point] * (Xnnp[:,point]-Xncm) * (Ynnp[:,point]-Yncm).T
3821         #
3822         _Innovation  = Ynpu - Yncm
3823         if selfA._parameters["EstimationOf"] == "Parameters":
3824             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3825                 _Innovation = _Innovation - Cm * Un
3826         #
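        # Unscented Kalman analysis step from the empirical covariances of the
        # sigma points:
        #     K  = Pxy Pyy^{-1},   xa = xm + K (y - ym),   Pa = Pm - K Pyy K^T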
3827         Kn = Pxyn * Pyyn.I
3828         Xn = Xncm + Kn * _Innovation
3829         Pn = Pnm - Kn * Pyyn * Kn.T
3830         #
3831         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3832             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3833             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3834         #
3835         Xa = Xn # Pointer assignment, no copy
3836         #--------------------------
3837         selfA._setInternalState("Xn", Xn)
3838         selfA._setInternalState("Pn", Pn)
3839         #--------------------------
3840         #
3841         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3842         # ---> with the analysis
3843         selfA.StoredVariables["Analysis"].store( Xa )
3844         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3845             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
3846         if selfA._toStore("InnovationAtCurrentAnalysis"):
3847             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3848         # ---> with the current state
3849         if selfA._parameters["StoreInternalVariables"] \
3850             or selfA._toStore("CurrentState"):
3851             selfA.StoredVariables["CurrentState"].store( Xn )
3852         if selfA._toStore("ForecastState"):
3853             selfA.StoredVariables["ForecastState"].store( Xncm )
3854         if selfA._toStore("ForecastCovariance"):
3855             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
3856         if selfA._toStore("BMA"):
3857             selfA.StoredVariables["BMA"].store( Xncm - Xa )
3858         if selfA._toStore("InnovationAtCurrentState"):
3859             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3860         if selfA._toStore("SimulatedObservationAtCurrentState") \
3861             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3862             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
3863         # ---> others
3864         if selfA._parameters["StoreInternalVariables"] \
3865             or selfA._toStore("CostFunctionJ") \
3866             or selfA._toStore("CostFunctionJb") \
3867             or selfA._toStore("CostFunctionJo") \
3868             or selfA._toStore("CurrentOptimum") \
3869             or selfA._toStore("APosterioriCovariance"):
3870             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3871             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3872             J   = Jb + Jo
3873             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3874             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3875             selfA.StoredVariables["CostFunctionJ" ].store( J )
3876             #
3877             if selfA._toStore("IndexOfOptimum") \
3878                 or selfA._toStore("CurrentOptimum") \
3879                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3880                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3881                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3882                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3883                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3884             if selfA._toStore("IndexOfOptimum"):
3885                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3886             if selfA._toStore("CurrentOptimum"):
3887                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3888             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3889                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3890             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3891                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3892             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3893                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3894             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3895                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3896         if selfA._toStore("APosterioriCovariance"):
3897             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3898         if selfA._parameters["EstimationOf"] == "Parameters" \
3899             and J < previousJMinimum:
3900             previousJMinimum    = J
3901             XaMin               = Xa
3902             if selfA._toStore("APosterioriCovariance"):
3903                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3904     #
3905     # Additional final storage of the optimum for parameter estimation
3906     # -----------------------------------------------------------------
3907     if selfA._parameters["EstimationOf"] == "Parameters":
3908         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3909         selfA.StoredVariables["Analysis"].store( XaMin )
3910         if selfA._toStore("APosterioriCovariance"):
3911             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3912         if selfA._toStore("BMA"):
3913             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3914     #
3915     return 0
3916
3917 # ==============================================================================
3918 def van3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3919     """
3920     3DVAR variational analysis with no inversion of B
3921     """
3922     #
3923     # Initializations
3924     # ---------------
3925     #
3926     # Operators
3927     Hm = HO["Direct"].appliedTo
3928     Ha = HO["Adjoint"].appliedInXTo
3929     #
3930     # Precompute the transpose of B and the inverse of R
3931     BT = B.getT()
3932     RI = R.getI()
3933     #
3934     # Starting point of the optimization
3935     Xini = numpy.zeros(Xb.shape)
3936     #
3937     # Definition of the cost function
3938     # -------------------------------
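    # In this variant the control variable is v, with the change of variables
    # x = xb + B v, so that only products by B (and never by B^{-1}) are
    # required during the minimization:
    #     J(v) = 1/2 v^T B v + 1/2 [y - H(xb + B v)]^T R^{-1} [y - H(xb + B v)]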
3939     def CostFunction(v):
3940         _V = numpy.asmatrix(numpy.ravel( v )).T
3941         _X = Xb + B * _V
3942         if selfA._parameters["StoreInternalVariables"] or \
3943             selfA._toStore("CurrentState") or \
3944             selfA._toStore("CurrentOptimum"):
3945             selfA.StoredVariables["CurrentState"].store( _X )
3946         _HX = Hm( _X )
3947         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
3948         _Innovation = Y - _HX
3949         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3950             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3951             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3952         if selfA._toStore("InnovationAtCurrentState"):
3953             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3954         #
3955         Jb  = float( 0.5 * _V.T * BT * _V )
3956         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3957         J   = Jb + Jo
3958         #
3959         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3960         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3961         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3962         selfA.StoredVariables["CostFunctionJ" ].store( J )
3963         if selfA._toStore("IndexOfOptimum") or \
3964             selfA._toStore("CurrentOptimum") or \
3965             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3966             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3967             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3968             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3969             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3970         if selfA._toStore("IndexOfOptimum"):
3971             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3972         if selfA._toStore("CurrentOptimum"):
3973             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3974         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3975             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3976         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3977             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3978         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3979             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3980         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3981             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3982         return J
3983     #
3984     def GradientOfCostFunction(v):
3985         _V = numpy.asmatrix(numpy.ravel( v )).T
3986         _X = Xb + B * _V
3987         _HX     = Hm( _X )
3988         _HX     = numpy.asmatrix(numpy.ravel( _HX )).T
3989         GradJb  = BT * _V
3990         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3991         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3992         return GradJ
3993     #
3994     # Minimisation de la fonctionnelle
3995     # --------------------------------
3996     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3997     #
3998     if selfA._parameters["Minimizer"] == "LBFGSB":
3999         if "0.19" <= scipy.version.version <= "1.1.0":
4000             import lbfgsbhlt as optimiseur
4001         else:
4002             import scipy.optimize as optimiseur
4003         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
4004             func        = CostFunction,
4005             x0          = Xini,
4006             fprime      = GradientOfCostFunction,
4007             args        = (),
4008             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4009             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
4010             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
4011             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4012             iprint      = selfA._parameters["optiprint"],
4013             )
4014         nfeval = Informations['funcalls']
4015         rc     = Informations['warnflag']
4016     elif selfA._parameters["Minimizer"] == "TNC":
4017         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
4018             func        = CostFunction,
4019             x0          = Xini,
4020             fprime      = GradientOfCostFunction,
4021             args        = (),
4022             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4023             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
4024             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4025             ftol        = selfA._parameters["CostDecrementTolerance"],
4026             messages    = selfA._parameters["optmessages"],
4027             )
4028     elif selfA._parameters["Minimizer"] == "CG":
4029         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
4030             f           = CostFunction,
4031             x0          = Xini,
4032             fprime      = GradientOfCostFunction,
4033             args        = (),
4034             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4035             gtol        = selfA._parameters["GradientNormTolerance"],
4036             disp        = selfA._parameters["optdisp"],
4037             full_output = True,
4038             )
4039     elif selfA._parameters["Minimizer"] == "NCG":
4040         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
4041             f           = CostFunction,
4042             x0          = Xini,
4043             fprime      = GradientOfCostFunction,
4044             args        = (),
4045             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4046             avextol     = selfA._parameters["CostDecrementTolerance"],
4047             disp        = selfA._parameters["optdisp"],
4048             full_output = True,
4049             )
4050     elif selfA._parameters["Minimizer"] == "BFGS":
4051         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
4052             f           = CostFunction,
4053             x0          = Xini,
4054             fprime      = GradientOfCostFunction,
4055             args        = (),
4056             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4057             gtol        = selfA._parameters["GradientNormTolerance"],
4058             disp        = selfA._parameters["optdisp"],
4059             full_output = True,
4060             )
4061     else:
4062         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
4063     #
4064     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4065     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
4066     #
4067     # Workaround for a TNC bug on the returned Minimum
4068     # -------------------------------------------------
4069     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
4070         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
4071         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
4072     else:
4073         Minimum = Xb + B * numpy.asmatrix(numpy.ravel( Minimum )).T
4074     #
4075     # Retrieval of the analysis
4076     # -------------------------
4077     Xa = Minimum
4078     #
4079     selfA.StoredVariables["Analysis"].store( Xa )
4080     #
4081     if selfA._toStore("OMA") or \
4082         selfA._toStore("SigmaObs2") or \
4083         selfA._toStore("SimulationQuantiles") or \
4084         selfA._toStore("SimulatedObservationAtOptimum"):
4085         if selfA._toStore("SimulatedObservationAtCurrentState"):
4086             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
4087         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4088             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
4089         else:
4090             HXa = Hm( Xa )
4091     #
4092     # Computation of the analysis covariance
4093     # ---------------------------------------
4094     if selfA._toStore("APosterioriCovariance") or \
4095         selfA._toStore("SimulationQuantiles") or \
4096         selfA._toStore("JacobianMatrixAtOptimum") or \
4097         selfA._toStore("KalmanGainAtOptimum"):
4098         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
4099         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
4100     if selfA._toStore("APosterioriCovariance") or \
4101         selfA._toStore("SimulationQuantiles") or \
4102         selfA._toStore("KalmanGainAtOptimum"):
4103         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
4104         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
4105     if selfA._toStore("APosterioriCovariance") or \
4106         selfA._toStore("SimulationQuantiles"):
4107         BI = B.getI()
4108         HessienneI = []
4109         nb = Xa.size
4110         for i in range(nb):
4111             _ee    = numpy.matrix(numpy.zeros(nb)).T
4112             _ee[i] = 1.
4113             _HtEE  = numpy.dot(HtM,_ee)
4114             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
4115             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
4116         HessienneI = numpy.matrix( HessienneI )
4117         A = HessienneI.I
4118         if min(A.shape) != max(A.shape):
4119             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
4120         if (numpy.diag(A) < 0).any():
4121             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
4122         if logging.getLogger().level < logging.WARNING: # This check is only performed in debug mode
4123             try:
4124                 L = numpy.linalg.cholesky( A )
4125             except numpy.linalg.LinAlgError:
4126                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
4127     if selfA._toStore("APosterioriCovariance"):
4128         selfA.StoredVariables["APosterioriCovariance"].store( A )
4129     if selfA._toStore("JacobianMatrixAtOptimum"):
4130         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
4131     if selfA._toStore("KalmanGainAtOptimum"):
4132         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
4133         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
4134         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
4135     #
4136     # Additional calculations and/or storage
4137     # ---------------------------------------
4138     if selfA._toStore("Innovation") or \
4139         selfA._toStore("SigmaObs2") or \
4140         selfA._toStore("MahalanobisConsistency") or \
4141         selfA._toStore("OMB"):
        HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T # HXb is not computed earlier in this variant
4142         d  = Y - HXb
4143     if selfA._toStore("Innovation"):
4144         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
4145     if selfA._toStore("BMA"):
4146         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
4147     if selfA._toStore("OMA"):
4148         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
4149     if selfA._toStore("OMB"):
4150         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
4151     if selfA._toStore("SigmaObs2"):
4152         TraceR = R.trace(Y.size)
4153         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
4154     if selfA._toStore("MahalanobisConsistency"):
4155         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
4156     if selfA._toStore("SimulationQuantiles"):
4157         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
4158     if selfA._toStore("SimulatedObservationAtBackground"):
4159         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel( Hm( Xb ) ) )
4160     if selfA._toStore("SimulatedObservationAtOptimum"):
4161         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
4162     #
4163     return 0
4164
4165 # ==============================================================================
4166 if __name__ == "__main__":
4167     print('\n AUTODIAGNOSTIC\n')