1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 __doc__ = """
24     Defines the generic numerical objects.
25 """
26 __author__ = "Jean-Philippe ARGAUD"
27
28 import os, time, copy, types, sys, logging
29 import math, numpy, scipy, scipy.optimize, scipy.version
30 from daCore.BasicObjects import Operator
31 from daCore.PlatformInfo import PlatformInfo
32 mpr = PlatformInfo().MachinePrecision()
33 mfp = PlatformInfo().MaximumPrecision()
34 # logging.getLogger().setLevel(logging.DEBUG)
35
36 # ==============================================================================
37 def ExecuteFunction( triplet ):
38     assert len(triplet) == 3, "Incorrect number of arguments"
39     X, xArgs, funcrepr = triplet
40     __X = numpy.ravel( X ).reshape((-1,1))
41     __sys_path_tmp = sys.path ; sys.path.insert(0,funcrepr["__userFunction__path"])
42     __module = __import__(funcrepr["__userFunction__modl"], globals(), locals(), [])
43     __fonction = getattr(__module,funcrepr["__userFunction__name"])
44     sys.path = __sys_path_tmp ; del __sys_path_tmp
45     if isinstance(xArgs, dict):
46         __HX  = __fonction( __X, **xArgs )
47     else:
48         __HX  = __fonction( __X )
49     return numpy.ravel( __HX )
50
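# Illustrative sketch (comment only, not executed): ExecuteFunction expects a
# triplet (X, xArgs, funcrepr), where funcrepr locates the user function to be
# re-imported in a worker process. The path and module names below are
# hypothetical placeholders.
#
#   funcrepr = {
#       "__userFunction__path" : "/some/user/directory",   # hypothetical
#       "__userFunction__modl" : "usermodule",             # hypothetical
#       "__userFunction__name" : "DirectOperator",         # hypothetical
#       }
#   HX = ExecuteFunction( (X, None, funcrepr) )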
51 # ==============================================================================
52 class FDApproximation(object):
53     """
54     This class provides an interface to define approximated operators. When an
55     object is created from a user function "Function", it exposes the three
56     methods "DirectOperator", "TangentOperator" and "AdjointOperator". The FD
57     approximation is controlled either by the multiplicative increment
58     "increment" (1% by default), or by the fixed increment "dX" which is then
59     multiplied by "increment" (hence in %), and centered finite differences
60     are used if the boolean "centeredDF" is true.
61     """
62     def __init__(self,
63             name                  = "FDApproximation",
64             Function              = None,
65             centeredDF            = False,
66             increment             = 0.01,
67             dX                    = None,
68             extraArguments        = None,
69             avoidingRedundancy    = True,
70             toleranceInRedundancy = 1.e-18,
71             lenghtOfRedundancy    = -1,
72             mpEnabled             = False,
73             mpWorkers             = None,
74             mfEnabled             = False,
75             ):
76         self.__name = str(name)
77         self.__extraArgs = extraArguments
78         if mpEnabled:
79             try:
80                 import multiprocessing
81                 self.__mpEnabled = True
82             except ImportError:
83                 self.__mpEnabled = False
84         else:
85             self.__mpEnabled = False
86         self.__mpWorkers = mpWorkers
87         if self.__mpWorkers is not None and self.__mpWorkers < 1:
88             self.__mpWorkers = None
89         logging.debug("FDA Calculs en multiprocessing : %s (nombre de processus : %s)"%(self.__mpEnabled,self.__mpWorkers))
90         #
91         if mfEnabled:
92             self.__mfEnabled = True
93         else:
94             self.__mfEnabled = False
95         logging.debug("FDA Calculs en multifonctions : %s"%(self.__mfEnabled,))
96         #
97         if avoidingRedundancy:
98             self.__avoidRC = True
99             self.__tolerBP = float(toleranceInRedundancy)
100             self.__lenghtRJ = int(lenghtOfRedundancy)
101             self.__listJPCP = [] # Jacobian Previous Calculated Points
102             self.__listJPCI = [] # Jacobian Previous Calculated Increment
103             self.__listJPCR = [] # Jacobian Previous Calculated Results
104             self.__listJPPN = [] # Jacobian Previous Calculated Point Norms
105             self.__listJPIN = [] # Jacobian Previous Calculated Increment Norms
106         else:
107             self.__avoidRC = False
108         #
109         if self.__mpEnabled:
110             if isinstance(Function,types.FunctionType):
111                 logging.debug("FDA Calculs en multiprocessing : FunctionType")
112                 self.__userFunction__name = Function.__name__
113                 try:
114                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
115                 except Exception:
116                     mod = os.path.abspath(Function.__globals__['__file__'])
117                 if not os.path.isfile(mod):
118                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
119                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
120                 self.__userFunction__path = os.path.dirname(mod)
121                 del mod
122                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
123                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
124             elif isinstance(Function,types.MethodType):
125                 logging.debug("FDA Calculs en multiprocessing : MethodType")
126                 self.__userFunction__name = Function.__name__
127                 try:
128                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
129                 except Exception:
130                     mod = os.path.abspath(Function.__func__.__globals__['__file__'])
131                 if not os.path.isfile(mod):
132                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
133                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
134                 self.__userFunction__path = os.path.dirname(mod)
135                 del mod
136                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
137                 self.__userFunction = self.__userOperator.appliedTo # Pour le calcul Direct
138             else:
139                 raise TypeError("User defined function or method has to be provided for finite differences approximation.")
140         else:
141             self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
142             self.__userFunction = self.__userOperator.appliedTo
143         #
144         self.__centeredDF = bool(centeredDF)
145         if abs(float(increment)) > 1.e-15:
146             self.__increment  = float(increment)
147         else:
148             self.__increment  = 0.01
149         if dX is None:
150             self.__dX     = None
151         else:
152             self.__dX     = numpy.ravel( dX )
153         logging.debug("FDA Reduction des doublons de calcul : %s"%self.__avoidRC)
154         if self.__avoidRC:
155             logging.debug("FDA Tolerance de determination des doublons : %.2e"%self.__tolerBP)
156
157     # ---------------------------------------------------------
158     def __doublon__(self, e, l, n, v=None):
159         __ac, __iac = False, -1
160         for i in range(len(l)-1,-1,-1):
161             if numpy.linalg.norm(e - l[i]) < self.__tolerBP * n[i]:
162                 __ac, __iac = True, i
163                 if v is not None: logging.debug("FDA Cas%s déja calculé, récupération du doublon %i"%(v,__iac))
164                 break
165         return __ac, __iac
166
167     # ---------------------------------------------------------
168     def DirectOperator(self, X, **extraArgs ):
169         """
170         Computation of the direct operator using the provided function.
171
172         NB: the extraArgs are present to ensure calling compatibility, but they
173         must not be passed here to the user function.
174         """
175         logging.debug("FDA Calcul DirectOperator (explicite)")
176         if self.__mfEnabled:
177             _HX = self.__userFunction( X, argsAsSerie = True )
178         else:
179             _HX = numpy.ravel(self.__userFunction( numpy.ravel(X) ))
180         #
181         return _HX
182
183     # ---------------------------------------------------------
184     def TangentMatrix(self, X ):
185         """
186         Computation of the tangent operator as the Jacobian by finite
187         differences, i.e. the gradient of H at X. Directional finite
188         differences are used around the point X. X is a numpy.ndarray.
189
190         Centered finite differences (2nd order approximation):
191         1/ For each component i of X, the perturbation dX[i] is added to and
192            subtracted from the component X[i] to build X_plus_dXi and
193            X_moins_dXi, and the responses HX_plus_dXi = H( X_plus_dXi ) and
194            HX_moins_dXi = H( X_moins_dXi ) are computed
195         2/ The differences (HX_plus_dXi-HX_moins_dXi) are formed and divided
196            by the step 2*dXi
197         3/ Each result, component by component, becomes a column of the Jacobian
198
199         Non-centered finite differences (1st order approximation):
200         1/ For each component i of X, the perturbation dX[i] is added to the
201            component X[i] to build X_plus_dXi, and the response
202            HX_plus_dXi = H( X_plus_dXi ) is computed
203         2/ The central value HX = H(X) is computed
204         3/ The differences (HX_plus_dXi-HX) are formed and divided by
205            the step dXi
206         4/ Each result, component by component, becomes a column of the Jacobian
207
208         """
209         logging.debug("FDA Début du calcul de la Jacobienne")
210         logging.debug("FDA   Incrément de............: %s*X"%float(self.__increment))
211         logging.debug("FDA   Approximation centrée...: %s"%(self.__centeredDF))
212         #
213         if X is None or len(X)==0:
214             raise ValueError("Nominal point X for approximate derivatives can not be None or void (given X: %s)."%(str(X),))
215         #
216         _X = numpy.ravel( X )
217         #
218         if self.__dX is None:
219             _dX  = self.__increment * _X
220         else:
221             _dX = numpy.ravel( self.__dX )
222         assert len(_X) == len(_dX), "Inconsistent dX increment length with respect to the X one"
223         assert _X.size == _dX.size, "Inconsistent dX increment size with respect to the X one"
224         #
225         if (_dX == 0.).any():
226             moyenne = _dX.mean()
227             if moyenne == 0.:
228                 _dX = numpy.where( _dX == 0., float(self.__increment), _dX )
229             else:
230                 _dX = numpy.where( _dX == 0., moyenne, _dX )
231         #
232         __alreadyCalculated  = False
233         if self.__avoidRC:
234             __bidon, __alreadyCalculatedP = self.__doublon__(_X,  self.__listJPCP, self.__listJPPN, None)
235             __bidon, __alreadyCalculatedI = self.__doublon__(_dX, self.__listJPCI, self.__listJPIN, None)
236             if __alreadyCalculatedP == __alreadyCalculatedI > -1:
237                 __alreadyCalculated, __i = True, __alreadyCalculatedP
238                 logging.debug("FDA Cas J déjà calculé, récupération du doublon %i"%__i)
239         #
240         if __alreadyCalculated:
241             logging.debug("FDA   Calcul Jacobienne (par récupération du doublon %i)"%__i)
242             _Jacobienne = self.__listJPCR[__i]
243         else:
244             logging.debug("FDA   Calcul Jacobienne (explicite)")
245             if self.__centeredDF:
246                 #
247                 if self.__mpEnabled and not self.__mfEnabled:
248                     funcrepr = {
249                         "__userFunction__path" : self.__userFunction__path,
250                         "__userFunction__modl" : self.__userFunction__modl,
251                         "__userFunction__name" : self.__userFunction__name,
252                     }
253                     _jobs = []
254                     for i in range( len(_dX) ):
255                         _dXi            = _dX[i]
256                         _X_plus_dXi     = numpy.array( _X, dtype=float )
257                         _X_plus_dXi[i]  = _X[i] + _dXi
258                         _X_moins_dXi    = numpy.array( _X, dtype=float )
259                         _X_moins_dXi[i] = _X[i] - _dXi
260                         #
261                         _jobs.append( (_X_plus_dXi,  self.__extraArgs, funcrepr) )
262                         _jobs.append( (_X_moins_dXi, self.__extraArgs, funcrepr) )
263                     #
264                     import multiprocessing
265                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
266                     _HX_plusmoins_dX = self.__pool.map( ExecuteFunction, _jobs )
267                     self.__pool.close()
268                     self.__pool.join()
269                     #
270                     _Jacobienne  = []
271                     for i in range( len(_dX) ):
272                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
273                     #
274                 elif self.__mfEnabled:
275                     _xserie = []
276                     for i in range( len(_dX) ):
277                         _dXi            = _dX[i]
278                         _X_plus_dXi     = numpy.array( _X, dtype=float )
279                         _X_plus_dXi[i]  = _X[i] + _dXi
280                         _X_moins_dXi    = numpy.array( _X, dtype=float )
281                         _X_moins_dXi[i] = _X[i] - _dXi
282                         #
283                         _xserie.append( _X_plus_dXi )
284                         _xserie.append( _X_moins_dXi )
285                     #
286                     _HX_plusmoins_dX = self.DirectOperator( _xserie )
287                     #
288                     _Jacobienne  = []
289                     for i in range( len(_dX) ):
290                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
291                     #
292                 else:
293                     _Jacobienne  = []
294                     for i in range( _dX.size ):
295                         _dXi            = _dX[i]
296                         _X_plus_dXi     = numpy.array( _X, dtype=float )
297                         _X_plus_dXi[i]  = _X[i] + _dXi
298                         _X_moins_dXi    = numpy.array( _X, dtype=float )
299                         _X_moins_dXi[i] = _X[i] - _dXi
300                         #
301                         _HX_plus_dXi    = self.DirectOperator( _X_plus_dXi )
302                         _HX_moins_dXi   = self.DirectOperator( _X_moins_dXi )
303                         #
304                         _Jacobienne.append( numpy.ravel( _HX_plus_dXi - _HX_moins_dXi ) / (2.*_dXi) )
305                 #
306             else:
307                 #
308                 if self.__mpEnabled and not self.__mfEnabled:
309                     funcrepr = {
310                         "__userFunction__path" : self.__userFunction__path,
311                         "__userFunction__modl" : self.__userFunction__modl,
312                         "__userFunction__name" : self.__userFunction__name,
313                     }
314                     _jobs = []
315                     _jobs.append( (_X, self.__extraArgs, funcrepr) )
316                     for i in range( len(_dX) ):
317                         _X_plus_dXi    = numpy.array( _X, dtype=float )
318                         _X_plus_dXi[i] = _X[i] + _dX[i]
319                         #
320                         _jobs.append( (_X_plus_dXi, self.__extraArgs, funcrepr) )
321                     #
322                     import multiprocessing
323                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
324                     _HX_plus_dX = self.__pool.map( ExecuteFunction, _jobs )
325                     self.__pool.close()
326                     self.__pool.join()
327                     #
328                     _HX = _HX_plus_dX.pop(0)
329                     #
330                     _Jacobienne = []
331                     for i in range( len(_dX) ):
332                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
333                     #
334                 elif self.__mfEnabled:
335                     _xserie = []
336                     _xserie.append( _X )
337                     for i in range( len(_dX) ):
338                         _X_plus_dXi    = numpy.array( _X, dtype=float )
339                         _X_plus_dXi[i] = _X[i] + _dX[i]
340                         #
341                         _xserie.append( _X_plus_dXi )
342                     #
343                     _HX_plus_dX = self.DirectOperator( _xserie )
344                     #
345                     _HX = _HX_plus_dX.pop(0)
346                     #
347                     _Jacobienne = []
348                     for i in range( len(_dX) ):
349                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
350                     #
351                 else:
352                     _Jacobienne  = []
353                     _HX = self.DirectOperator( _X )
354                     for i in range( _dX.size ):
355                         _dXi            = _dX[i]
356                         _X_plus_dXi     = numpy.array( _X, dtype=float )
357                         _X_plus_dXi[i]  = _X[i] + _dXi
358                         #
359                         _HX_plus_dXi = self.DirectOperator( _X_plus_dXi )
360                         #
361                         _Jacobienne.append( numpy.ravel(( _HX_plus_dXi - _HX ) / _dXi) )
362                 #
363             #
364             _Jacobienne = numpy.transpose( numpy.vstack( _Jacobienne ) )
365             if self.__avoidRC:
366                 if self.__lenghtRJ < 0: self.__lenghtRJ = 2 * _X.size
367                 while len(self.__listJPCP) > self.__lenghtRJ:
368                     self.__listJPCP.pop(0)
369                     self.__listJPCI.pop(0)
370                     self.__listJPCR.pop(0)
371                     self.__listJPPN.pop(0)
372                     self.__listJPIN.pop(0)
373                 self.__listJPCP.append( copy.copy(_X) )
374                 self.__listJPCI.append( copy.copy(_dX) )
375                 self.__listJPCR.append( copy.copy(_Jacobienne) )
376                 self.__listJPPN.append( numpy.linalg.norm(_X) )
377                 self.__listJPIN.append( numpy.linalg.norm(_Jacobienne) )
378         #
379         logging.debug("FDA Fin du calcul de la Jacobienne")
380         #
381         return _Jacobienne
382
383     # ---------------------------------------------------------
384     def TangentOperator(self, paire, **extraArgs ):
385         """
386         Computation of the tangent operator using the Jacobian.
387
388         NB: the extraArgs are present to ensure calling compatibility, but they
389         must not be passed here to the user function.
390         """
391         if self.__mfEnabled:
392             assert len(paire) == 1, "Incorrect length of arguments"
393             _paire = paire[0]
394             assert len(_paire) == 2, "Incorrect number of arguments"
395         else:
396             assert len(paire) == 2, "Incorrect number of arguments"
397             _paire = paire
398         X, dX = _paire
399         _Jacobienne = self.TangentMatrix( X )
400         if dX is None or len(dX) == 0:
401             #
402             # Calcul de la forme matricielle si le second argument est None
403             # -------------------------------------------------------------
404             if self.__mfEnabled: return [_Jacobienne,]
405             else:                return _Jacobienne
406         else:
407             #
408             # Calcul de la valeur linéarisée de H en X appliqué à dX
409             # ------------------------------------------------------
410             _dX = numpy.ravel( dX )
411             _HtX = numpy.dot(_Jacobienne, _dX)
412             if self.__mfEnabled: return [_HtX,]
413             else:                return _HtX
414
415     # ---------------------------------------------------------
416     def AdjointOperator(self, paire, **extraArgs ):
417         """
418         Computation of the adjoint operator using the Jacobian.
419
420         NB: the extraArgs are present to ensure calling compatibility, but they
421         must not be passed here to the user function.
422         """
423         if self.__mfEnabled:
424             assert len(paire) == 1, "Incorrect length of arguments"
425             _paire = paire[0]
426             assert len(_paire) == 2, "Incorrect number of arguments"
427         else:
428             assert len(paire) == 2, "Incorrect number of arguments"
429             _paire = paire
430         X, Y = _paire
431         _JacobienneT = self.TangentMatrix( X ).T
432         if Y is None or len(Y) == 0:
433             #
434             # Calcul de la forme matricielle si le second argument est None
435             # -------------------------------------------------------------
436             if self.__mfEnabled: return [_JacobienneT,]
437             else:                return _JacobienneT
438         else:
439             #
440             # Calcul de la valeur de l'adjoint en X appliqué à Y
441             # --------------------------------------------------
442             _Y = numpy.ravel( Y )
443             _HaY = numpy.dot(_JacobienneT, _Y)
444             if self.__mfEnabled: return [_HaY,]
445             else:                return _HaY
446
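# Illustrative usage sketch (comment only, not executed): building a finite
# difference approximation of a simple operator. The function "simulator" below
# is a hypothetical user function; FDApproximation itself requires the daCore
# environment to be importable.
#
#   >>> import numpy
#   >>> def simulator( x ):
#   ...     x = numpy.ravel( x )
#   ...     return numpy.array([x[0]**2, x[0]*x[1]])
#   >>> FDA = FDApproximation( Function = simulator, increment = 0.01 )
#   >>> J = FDA.TangentMatrix( numpy.array([1., 2.]) )   # approximates [[2., 0.], [2., 1.]]
#   >>> HtX = FDA.TangentOperator( (numpy.array([1., 2.]), numpy.array([1., 0.])) )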
447 # ==============================================================================
448 def EnsembleOfCenteredPerturbations( _bgcenter, _bgcovariance, _nbmembers ):
449     "Generation of an ensemble of _nbmembers centered random states"
450     #
451     _bgcenter = numpy.ravel(_bgcenter)[:,None]
452     if _nbmembers < 1:
453         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
454     #
455     if _bgcovariance is None:
456         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
457     else:
458         _Z = numpy.random.multivariate_normal(numpy.zeros(_bgcenter.size), _bgcovariance, size=_nbmembers).T
459         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers) + _Z
460     #
461     return BackgroundEnsemble
462
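# Illustrative usage sketch (comment only, not executed): the returned ensemble
# has one column per member. With a None covariance, all members equal the
# given center.
#
#   >>> import numpy
#   >>> E = EnsembleOfCenteredPerturbations( [0., 0.], numpy.identity(2), _nbmembers = 5 )
#   >>> E.shape
#   (2, 5)
#   >>> EnsembleOfCenteredPerturbations( [1., 2.], None, _nbmembers = 3 )
#   array([[1., 1., 1.],
#          [2., 2., 2.]])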
463 # ==============================================================================
464 def EnsembleOfBackgroundPerturbations( _bgcenter, _bgcovariance, _nbmembers, _withSVD = True):
465     "Generation of an ensemble of _nbmembers centered random states"
466     def __CenteredRandomAnomalies(Zr, N):
467         """
468         Generates a matrix of N centered random anomalies built from Zr,
469         following the handwritten notes of MB and consistent with the code of
470         PS with eps = -1
471         """
471         eps = -1
472         Q = numpy.identity(N-1)-numpy.ones((N-1,N-1))/numpy.sqrt(N)/(numpy.sqrt(N)-eps)
473         Q = numpy.concatenate((Q, [eps*numpy.ones(N-1)/numpy.sqrt(N)]), axis=0)
474         R, _ = numpy.linalg.qr(numpy.random.normal(size = (N-1,N-1)))
475         Q = numpy.dot(Q,R)
476         Zr = numpy.dot(Q,Zr)
477         return Zr.T
478     #
479     _bgcenter = numpy.ravel(_bgcenter).reshape((-1,1))
480     if _nbmembers < 1:
481         raise ValueError("Number of members has to be strictly positive (given number: %s)."%(str(_nbmembers),))
482     if _bgcovariance is None:
483         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
484     else:
485         if _withSVD:
486             U, s, V = numpy.linalg.svd(_bgcovariance, full_matrices=False)
487             _nbctl = _bgcenter.size
488             if _nbmembers > _nbctl:
489                 _Z = numpy.concatenate((numpy.dot(
490                     numpy.diag(numpy.sqrt(s[:_nbctl])), V[:_nbctl]),
491                     numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1-_nbctl)), axis = 0)
492             else:
493                 _Z = numpy.dot(numpy.diag(numpy.sqrt(s[:_nbmembers-1])), V[:_nbmembers-1])
494             _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
495             BackgroundEnsemble = _bgcenter + _Zca
496         else:
497             if max(abs(_bgcovariance.flatten())) > 0:
498                 _nbctl = _bgcenter.size
499                 _Z = numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1)
500                 _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
501                 BackgroundEnsemble = _bgcenter + _Zca
502             else:
503                 BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
504     #
505     return BackgroundEnsemble
506
507 # ==============================================================================
508 def EnsembleMean( __Ensemble ):
509     "Returns the empirical mean of an ensemble"
510     return numpy.asarray(__Ensemble).mean(axis=1, dtype=mfp).astype('float').reshape((-1,1))
511
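# Illustrative usage sketch (comment only, not executed): the mean is taken over
# the members (columns) and returned as a column vector.
#
#   >>> import numpy
#   >>> E = numpy.array([[0., 1., 2.],
#   ...                  [3., 4., 5.]])
#   >>> EnsembleMean( E )
#   array([[1.],
#          [4.]])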
512 # ==============================================================================
513 def EnsembleOfAnomalies( __Ensemble, __OptMean = None, __Normalisation = 1.):
514     "Returns the centered anomalies of an ensemble"
515     if __OptMean is None:
516         __Em = EnsembleMean( __Ensemble )
517     else:
518         __Em = numpy.ravel( __OptMean ).reshape((-1,1))
519     #
520     return __Normalisation * (numpy.asarray( __Ensemble ) - __Em)
521
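# Illustrative usage sketch (comment only, not executed): anomalies are the
# deviations of each member from the ensemble mean (or from a given mean),
# optionally scaled by __Normalisation.
#
#   >>> import numpy
#   >>> E = numpy.array([[0., 1., 2.],
#   ...                  [3., 4., 5.]])
#   >>> EnsembleOfAnomalies( E )
#   array([[-1.,  0.,  1.],
#          [-1.,  0.,  1.]])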
522 # ==============================================================================
523 def EnsembleErrorCovariance( __Ensemble, __quick = False ):
524     "Returns the empirical estimate of the ensemble covariance"
525     if __quick:
526         # Fast covariance, but rarely positive definite
527         __Covariance = numpy.cov( __Ensemble )
528     else:
529         # Result often identical to numpy.cov, but more robust
530         __n, __m = numpy.asarray( __Ensemble ).shape
531         __Anomalies = EnsembleOfAnomalies( __Ensemble )
532         # Empirical estimate
533         __Covariance = ( __Anomalies @ __Anomalies.T ) / (__m-1)
534         # Ensure symmetry
535         __Covariance = ( __Covariance + __Covariance.T ) * 0.5
536         # Ensure positivity
537         __epsilon    = mpr*numpy.trace( __Covariance )
538         __Covariance = __Covariance + __epsilon * numpy.identity(__n)
539     #
540     return __Covariance
541
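# Illustrative usage sketch (comment only, not executed): with __m members, the
# empirical covariance is (Anomalies @ Anomalies.T) / (__m - 1), then symmetrized
# and regularized; with __quick=True the result is simply numpy.cov.
#
#   >>> import numpy
#   >>> E = numpy.array([[0., 1., 2.],
#   ...                  [3., 4., 5.]])
#   >>> EnsembleErrorCovariance( E, __quick = True )
#   array([[1., 1.],
#          [1., 1.]])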
542 # ==============================================================================
543 def EnsemblePerturbationWithGivenCovariance( __Ensemble, __Covariance, __Seed=None ):
544     "Add a perturbation to each member of an ensemble according to a prescribed covariance"
545     if hasattr(__Covariance,"assparsematrix"):
546         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance.assparsematrix())/abs(__Ensemble).mean() < mpr).all():
547             # Traitement d'une covariance nulle ou presque
548             return __Ensemble
549         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance.assparsematrix()) < mpr).all():
550             # Traitement d'une covariance nulle ou presque
551             return __Ensemble
552     else:
553         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance)/abs(__Ensemble).mean() < mpr).all():
554             # Traitement d'une covariance nulle ou presque
555             return __Ensemble
556         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance) < mpr).all():
557             # Traitement d'une covariance nulle ou presque
558             return __Ensemble
559     #
560     __n, __m = __Ensemble.shape
561     if __Seed is not None: numpy.random.seed(__Seed)
562     #
563     if hasattr(__Covariance,"isscalar") and __Covariance.isscalar():
564         # Traitement d'une covariance multiple de l'identité
565         __zero = 0.
566         __std  = numpy.sqrt(__Covariance.assparsematrix())
567         __Ensemble += numpy.random.normal(__zero, __std, size=(__m,__n)).T
568     #
569     elif hasattr(__Covariance,"isvector") and __Covariance.isvector():
570         # Traitement d'une covariance diagonale avec variances non identiques
571         __zero = numpy.zeros(__n)
572         __std  = numpy.sqrt(__Covariance.assparsematrix())
573         __Ensemble += numpy.asarray([numpy.random.normal(__zero, __std) for i in range(__m)]).T
574     #
575     elif hasattr(__Covariance,"ismatrix") and __Covariance.ismatrix():
576         # Traitement d'une covariance pleine
577         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance.asfullmatrix(__n), size=__m).T
578     #
579     elif isinstance(__Covariance, numpy.ndarray):
580         # Traitement d'une covariance numpy pleine, sachant qu'on arrive ici en dernier
581         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance, size=__m).T
582     #
583     else:
584         raise ValueError("Error in ensemble perturbation with inadequate covariance specification")
585     #
586     return __Ensemble
587
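# Illustrative usage sketch (comment only, not executed): each member (column)
# receives an independent random draw with the prescribed covariance; the
# ensemble shape is preserved and the input array is modified in place.
#
#   >>> import numpy
#   >>> E = numpy.zeros((2, 4))
#   >>> Ep = EnsemblePerturbationWithGivenCovariance( E, numpy.identity(2), __Seed = 1234 )
#   >>> Ep.shape
#   (2, 4)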
588 # ==============================================================================
589 def CovarianceInflation(
590         InputCovOrEns,
591         InflationType   = None,
592         InflationFactor = None,
593         BackgroundCov   = None,
594         ):
595     """
596     Inflation applicable either to Pb or Pa, or to the ensembles EXb or EXa
597
598     Summary: Hunt 2007, section 2.3.5
599     """
600     if InflationFactor is None:
601         return InputCovOrEns
602     else:
603         InflationFactor = float(InflationFactor)
604     #
605     if InflationType in ["MultiplicativeOnAnalysisCovariance", "MultiplicativeOnBackgroundCovariance"]:
606         if InflationFactor < 1.:
607             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
608         if InflationFactor < 1.+mpr:
609             return InputCovOrEns
610         OutputCovOrEns = InflationFactor**2 * InputCovOrEns
611     #
612     elif InflationType in ["MultiplicativeOnAnalysisAnomalies", "MultiplicativeOnBackgroundAnomalies"]:
613         if InflationFactor < 1.:
614             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
615         if InflationFactor < 1.+mpr:
616             return InputCovOrEns
617         InputCovOrEnsMean = InputCovOrEns.mean(axis=1, dtype=mfp).astype('float')
618         OutputCovOrEns = InputCovOrEnsMean[:,numpy.newaxis] \
619             + InflationFactor * (InputCovOrEns - InputCovOrEnsMean[:,numpy.newaxis])
620     #
621     elif InflationType in ["AdditiveOnAnalysisCovariance", "AdditiveOnBackgroundCovariance"]:
622         if InflationFactor < 0.:
623             raise ValueError("Inflation factor for additive inflation has to be greater than or equal to 0.")
624         if InflationFactor < mpr:
625             return InputCovOrEns
626         __n, __m = numpy.asarray(InputCovOrEns).shape
627         if __n != __m:
628             raise ValueError("Additive inflation can only be applied to a square (covariance) matrix.")
629         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * numpy.identity(__n)
630     #
631     elif InflationType == "HybridOnBackgroundCovariance":
632         if InflationFactor < 0.:
633             raise ValueError("Inflation factor for hybrid inflation has to be greater than or equal to 0.")
634         if InflationFactor < mpr:
635             return InputCovOrEns
636         __n, __m = numpy.asarray(InputCovOrEns).shape
637         if __n != __m:
638             raise ValueError("Hybrid inflation can only be applied to a square (covariance) matrix.")
639         if BackgroundCov is None:
640             raise ValueError("Background covariance matrix B has to be given for hybrid inflation.")
641         if InputCovOrEns.shape != BackgroundCov.shape:
642             raise ValueError("Ensemble covariance matrix has to be of the same size as the background covariance matrix B.")
643         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * BackgroundCov
644     #
645     elif InflationType == "Relaxation":
646         raise NotImplementedError("InflationType Relaxation")
647     #
648     else:
649         raise ValueError("Error in inflation type, '%s' is not a valid keyword."%InflationType)
650     #
651     return OutputCovOrEns
652
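# Illustrative usage sketch (comment only, not executed): multiplicative
# inflation on a covariance multiplies it by the squared factor, additive
# inflation blends it with the identity.
#
#   >>> import numpy
#   >>> CovarianceInflation( numpy.identity(2), "MultiplicativeOnAnalysisCovariance", 2. )
#   array([[4., 0.],
#          [0., 4.]])
#   >>> CovarianceInflation( numpy.identity(2), "AdditiveOnBackgroundCovariance", 0.5 )
#   array([[1., 0.],
#          [0., 1.]])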
653 # ==============================================================================
654 def HessienneEstimation(nb, HaM, HtM, BI, RI):
655     "Estimation of the Hessian"
656     #
657     HessienneI = []
658     for i in range(int(nb)):
659         _ee    = numpy.zeros((nb,1))
660         _ee[i] = 1.
661         _HtEE  = numpy.dot(HtM,_ee).reshape((-1,1))
662         HessienneI.append( numpy.ravel( BI * _ee + HaM * (RI * _HtEE) ) )
663     #
664     A = numpy.linalg.inv(numpy.array( HessienneI ))
665     #
666     if min(A.shape) != max(A.shape):
667         raise ValueError("The a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(str(A.shape),))
668     if (numpy.diag(A) < 0).any():
669         raise ValueError("The a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it.")
670     if logging.getLogger().level < logging.WARNING: # The check is only done in debug mode
671         try:
672             numpy.linalg.cholesky( A )
673         except numpy.linalg.LinAlgError:
674             raise ValueError("The a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator.")
675     #
676     return A
677
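# In the linear Gaussian case, the loop above assembles, column by column, the
# Hessian of the cost function J(x) = (x-xb)' B^{-1} (x-xb)/2 + (y-Hx)' R^{-1} (y-Hx)/2,
# namely HessienneI = B^{-1} + H' R^{-1} H applied to each canonical vector,
# and the a posteriori covariance is its inverse: A = (B^{-1} + H' R^{-1} H)^{-1}.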
678 # ==============================================================================
679 def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
680     "A posteriori quantiles estimation (selfA is modified)"
681     nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"]
682     #
683     # Traitement des bornes
684     if "StateBoundsForQuantiles" in selfA._parameters:
685         LBounds = selfA._parameters["StateBoundsForQuantiles"] # Prioritaire
686     elif "Bounds" in selfA._parameters:
687         LBounds = selfA._parameters["Bounds"]  # Défaut raisonnable
688     else:
689         LBounds = None
690     if LBounds is not None:
691         LBounds = ForceNumericBounds( LBounds )
692     _Xa = numpy.ravel(Xa)
693     #
694     # Échantillonnage des états
695     YfQ  = None
696     EXr  = None
697     for i in range(nbsamples):
698         if selfA._parameters["SimulationForQuantiles"] == "Linear" and HtM is not None and HXa is not None:
699             dXr = (numpy.random.multivariate_normal(_Xa,A) - _Xa).reshape((-1,1))
700             if LBounds is not None: # "EstimateProjection" par défaut
701                 dXr = numpy.max(numpy.hstack((dXr.reshape((-1,1)),LBounds[:,0].reshape((-1,1)) - _Xa.reshape((-1,1)))),axis=1).reshape((-1,1))
702                 dXr = numpy.min(numpy.hstack((dXr.reshape((-1,1)),LBounds[:,1].reshape((-1,1)) - _Xa.reshape((-1,1)))),axis=1).reshape((-1,1))
703             dYr = HtM @ dXr
704             Yr = HXa.reshape((-1,1)) + dYr
705             if selfA._toStore("SampledStateForQuantiles"): Xr = _Xa + numpy.ravel(dXr)
706         elif selfA._parameters["SimulationForQuantiles"] == "NonLinear" and Hm is not None:
707             Xr = numpy.random.multivariate_normal(_Xa,A)
708             if LBounds is not None: # "EstimateProjection" par défaut
709                 Xr = numpy.max(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,0].reshape((-1,1)))),axis=1)
710                 Xr = numpy.min(numpy.hstack((Xr.reshape((-1,1)),LBounds[:,1].reshape((-1,1)))),axis=1)
711             Yr = Hm( Xr )
712         else:
713             raise ValueError("Quantile simulation has to be either Linear or NonLinear.")
714         #
715         if YfQ is None:
716             YfQ = Yr.reshape((-1,1))
717             if selfA._toStore("SampledStateForQuantiles"): EXr = Xr.reshape((-1,1))
718         else:
719             YfQ = numpy.hstack((YfQ,Yr.reshape((-1,1))))
720             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.hstack((EXr,Xr.reshape((-1,1))))
721     #
722     # Extraction des quantiles
723     YfQ.sort(axis=-1)
724     YQ = None
725     for quantile in selfA._parameters["Quantiles"]:
726         if not (0. <= float(quantile) <= 1.): continue
727         indice = int(nbsamples * float(quantile) - 1./nbsamples)
728         if YQ is None: YQ = YfQ[:,indice].reshape((-1,1))
729         else:          YQ = numpy.hstack((YQ,YfQ[:,indice].reshape((-1,1))))
730     if YQ is not None: # Liste non vide de quantiles
731         selfA.StoredVariables["SimulationQuantiles"].store( YQ )
732     if selfA._toStore("SampledStateForQuantiles"):
733         selfA.StoredVariables["SampledStateForQuantiles"].store( EXr )
734     #
735     return 0
736
737 # ==============================================================================
738 def ForceNumericBounds( __Bounds ):
739     "Force the bounds to be numerical values, unless globally None"
740     # Keep a default value of None if there are no bounds at all
741     if __Bounds is None: return None
742     # Convert every individual None bound to +/- infinity
743     __Bounds = numpy.asarray( __Bounds, dtype=float )
744     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
745         raise ValueError("Incorrectly shaped bounds data")
746     __Bounds[numpy.isnan(__Bounds[:,0]),0] = -sys.float_info.max
747     __Bounds[numpy.isnan(__Bounds[:,1]),1] =  sys.float_info.max
748     return __Bounds
749
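# Illustrative usage sketch (comment only, not executed): None entries become
# -/+ sys.float_info.max so that downstream code can rely on purely numeric bounds.
#
#   >>> B = ForceNumericBounds( [[None, 1.], [0., None]] )
#   >>> B[0, 0] == -sys.float_info.max and B[1, 1] == sys.float_info.max
#   True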
750 # ==============================================================================
751 def RecentredBounds( __Bounds, __Center):
752     "Recenter the bounds around 0, unless globally None"
753     # Keep a default value of None if there are no bounds at all
754     if __Bounds is None: return None
755     # Recenter the numerical values of the bounds
756     return ForceNumericBounds( __Bounds ) - numpy.ravel( __Center ).reshape((-1,1))
757
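# Illustrative usage sketch (comment only, not executed): the bounds are shifted
# so that the given center maps to 0, typically used when the minimization works
# on increments around that center.
#
#   >>> import numpy
#   >>> RecentredBounds( [[0., 2.], [1., 3.]], [1., 1.] )
#   array([[-1.,  1.],
#          [ 0.,  2.]])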
758 # ==============================================================================
759 def ApplyBounds( __Vector, __Bounds, __newClip = True):
760     "Apply numerical bounds to a point"
761     # Keep the default value if there are no bounds
762     if __Bounds is None: return __Vector
763     #
764     if not isinstance(__Vector, numpy.ndarray): # Must be an array
765         raise ValueError("Incorrect array definition of vector data")
766     if not isinstance(__Bounds, numpy.ndarray): # Must be an array
767         raise ValueError("Incorrect array definition of bounds data")
768     if 2*__Vector.size != __Bounds.size: # Must be a 2-column array of the vector length
769         raise ValueError("Incorrect bounds number to be applied for this vector")
770     if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
771         raise ValueError("Incorrectly shaped bounds data")
772     #
773     if __newClip:
774         __Vector = __Vector.clip(
775             __Bounds[:,0].reshape(__Vector.shape),
776             __Bounds[:,1].reshape(__Vector.shape),
777             )
778     else:
779         __Vector = numpy.max(numpy.hstack((__Vector.reshape((-1,1)),numpy.asmatrix(__Bounds)[:,0])),axis=1)
780         __Vector = numpy.min(numpy.hstack((__Vector.reshape((-1,1)),numpy.asmatrix(__Bounds)[:,1])),axis=1)
781         __Vector = numpy.asarray(__Vector)
782     #
783     return __Vector
784
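# Illustrative usage sketch (comment only, not executed): each component of the
# vector is clipped between the corresponding lower and upper bound.
#
#   >>> import numpy
#   >>> ApplyBounds( numpy.array([ -1., 0.5, 5. ]), numpy.array([[0., 1.]] * 3) )
#   array([0. , 0.5, 1. ])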
785 # ==============================================================================
786 def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
787     """
788     Constrained Unscented Kalman Filter
789     """
790     if selfA._parameters["EstimationOf"] == "Parameters":
791         selfA._parameters["StoreInternalVariables"] = True
792     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
793     #
794     L     = Xb.size
795     Alpha = selfA._parameters["Alpha"]
796     Beta  = selfA._parameters["Beta"]
797     if selfA._parameters["Kappa"] == 0:
798         if selfA._parameters["EstimationOf"] == "State":
799             Kappa = 0
800         elif selfA._parameters["EstimationOf"] == "Parameters":
801             Kappa = 3 - L
802     else:
803         Kappa = selfA._parameters["Kappa"]
804     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
805     Gamma  = math.sqrt( L + Lambda )
806     #
807     Ww = []
808     Ww.append( 0. )
809     for i in range(2*L):
810         Ww.append( 1. / (2.*(L + Lambda)) )
811     #
812     Wm = numpy.array( Ww )
813     Wm[0] = Lambda / (L + Lambda)
814     Wc = numpy.array( Ww )
815     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
816     #
817     # Opérateurs
818     Hm = HO["Direct"].appliedControledFormTo
819     #
820     if selfA._parameters["EstimationOf"] == "State":
821         Mm = EM["Direct"].appliedControledFormTo
822     #
823     if CM is not None and "Tangent" in CM and U is not None:
824         Cm = CM["Tangent"].asMatrix(Xb)
825     else:
826         Cm = None
827     #
828     # Durée d'observation et tailles
829     if hasattr(Y,"stepnumber"):
830         duration = Y.stepnumber()
831         __p = numpy.cumprod(Y.shape())[-1]
832     else:
833         duration = 2
834         __p = numpy.array(Y).size
835     #
836     # Précalcul des inversions de B et R
837     if selfA._parameters["StoreInternalVariables"] \
838         or selfA._toStore("CostFunctionJ") \
839         or selfA._toStore("CostFunctionJb") \
840         or selfA._toStore("CostFunctionJo") \
841         or selfA._toStore("CurrentOptimum") \
842         or selfA._toStore("APosterioriCovariance"):
843         BI = B.getI()
844         RI = R.getI()
845     #
846     __n = Xb.size
847     #
848     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
849         Xn = Xb
850         if hasattr(B,"asfullmatrix"):
851             Pn = B.asfullmatrix(__n)
852         else:
853             Pn = B
854         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
855         selfA.StoredVariables["Analysis"].store( Xb )
856         if selfA._toStore("APosterioriCovariance"):
857             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
858     elif selfA._parameters["nextStep"]:
859         Xn = selfA._getInternalState("Xn")
860         Pn = selfA._getInternalState("Pn")
861     #
862     if selfA._parameters["EstimationOf"] == "Parameters":
863         XaMin            = Xn
864         previousJMinimum = numpy.finfo(float).max
865     #
866     for step in range(duration-1):
867         if hasattr(Y,"store"):
868             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
869         else:
870             Ynpu = numpy.ravel( Y ).reshape((__p,1))
871         #
872         if U is not None:
873             if hasattr(U,"store") and len(U)>1:
874                 Un = numpy.ravel( U[step] ).reshape((-1,1))
875             elif hasattr(U,"store") and len(U)==1:
876                 Un = numpy.ravel( U[0] ).reshape((-1,1))
877             else:
878                 Un = numpy.ravel( U ).reshape((-1,1))
879         else:
880             Un = None
881         #
882         Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
883         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
884         nbSpts = 2*Xn.size+1
885         #
886         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
887             for point in range(nbSpts):
888                 Xnp[:,point] = ApplyBounds( Xnp[:,point], selfA._parameters["Bounds"] )
889         #
890         XEtnnp = []
891         for point in range(nbSpts):
892             if selfA._parameters["EstimationOf"] == "State":
893                 XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1))
894                 if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
895                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
896                     XEtnnpi = XEtnnpi + Cm * Un
897                 if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
898                     XEtnnpi = ApplyBounds( XEtnnpi, selfA._parameters["Bounds"] )
899             elif selfA._parameters["EstimationOf"] == "Parameters":
900                 # --- > Par principe, M = Id, Q = 0
901                 XEtnnpi = Xnp[:,point]
902             XEtnnp.append( numpy.ravel(XEtnnpi).reshape((-1,1)) )
903         XEtnnp = numpy.concatenate( XEtnnp, axis=1 )
904         #
905         Xncm = ( XEtnnp * Wm ).sum(axis=1)
906         #
907         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
908             Xncm = ApplyBounds( Xncm, selfA._parameters["Bounds"] )
909         #
910         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
911         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
912         for point in range(nbSpts):
913             Pnm += Wc[point] * ((XEtnnp[:,point]-Xncm).reshape((-1,1)) * (XEtnnp[:,point]-Xncm))
914         #
915         if selfA._parameters["EstimationOf"] == "Parameters" and selfA._parameters["Bounds"] is not None:
916             Pnmdemi = selfA._parameters["Reconditioner"] * numpy.real(scipy.linalg.sqrtm(Pnm))
917         else:
918             Pnmdemi = numpy.real(scipy.linalg.sqrtm(Pnm))
919         #
920         Xnnp = numpy.hstack([Xncm.reshape((-1,1)), Xncm.reshape((-1,1))+Gamma*Pnmdemi, Xncm.reshape((-1,1))-Gamma*Pnmdemi])
921         #
922         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
923             for point in range(nbSpts):
924                 Xnnp[:,point] = ApplyBounds( Xnnp[:,point], selfA._parameters["Bounds"] )
925         #
926         Ynnp = []
927         for point in range(nbSpts):
928             if selfA._parameters["EstimationOf"] == "State":
929                 Ynnpi = Hm( (Xnnp[:,point], None) )
930             elif selfA._parameters["EstimationOf"] == "Parameters":
931                 Ynnpi = Hm( (Xnnp[:,point], Un) )
932             Ynnp.append( numpy.ravel(Ynnpi).reshape((-1,1)) )
933         Ynnp = numpy.concatenate( Ynnp, axis=1 )
934         #
935         Yncm = ( Ynnp * Wm ).sum(axis=1)
936         #
937         Pyyn = R
938         Pxyn = 0.
939         for point in range(nbSpts):
940             Pyyn += Wc[point] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
941             Pxyn += Wc[point] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
942         #
943         _Innovation  = Ynpu - Yncm.reshape((-1,1))
944         if selfA._parameters["EstimationOf"] == "Parameters":
945             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
946                 _Innovation = _Innovation - Cm * Un
947         #
948         Kn = Pxyn * Pyyn.I
949         Xn = Xncm.reshape((-1,1)) + Kn * _Innovation
950         Pn = Pnm - Kn * Pyyn * Kn.T
951         #
952         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
953             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
954         #
955         Xa = Xn # Pointeurs
956         #--------------------------
957         selfA._setInternalState("Xn", Xn)
958         selfA._setInternalState("Pn", Pn)
959         #--------------------------
960         #
961         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
962         # ---> avec analysis
963         selfA.StoredVariables["Analysis"].store( Xa )
964         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
965             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
966         if selfA._toStore("InnovationAtCurrentAnalysis"):
967             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
968         # ---> avec current state
969         if selfA._parameters["StoreInternalVariables"] \
970             or selfA._toStore("CurrentState"):
971             selfA.StoredVariables["CurrentState"].store( Xn )
972         if selfA._toStore("ForecastState"):
973             selfA.StoredVariables["ForecastState"].store( Xncm )
974         if selfA._toStore("ForecastCovariance"):
975             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
976         if selfA._toStore("BMA"):
977             selfA.StoredVariables["BMA"].store( Xncm - Xa )
978         if selfA._toStore("InnovationAtCurrentState"):
979             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
980         if selfA._toStore("SimulatedObservationAtCurrentState") \
981             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
982             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
983         # ---> autres
984         if selfA._parameters["StoreInternalVariables"] \
985             or selfA._toStore("CostFunctionJ") \
986             or selfA._toStore("CostFunctionJb") \
987             or selfA._toStore("CostFunctionJo") \
988             or selfA._toStore("CurrentOptimum") \
989             or selfA._toStore("APosterioriCovariance"):
990             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
991             Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
992             J   = Jb + Jo
993             selfA.StoredVariables["CostFunctionJb"].store( Jb )
994             selfA.StoredVariables["CostFunctionJo"].store( Jo )
995             selfA.StoredVariables["CostFunctionJ" ].store( J )
996             #
997             if selfA._toStore("IndexOfOptimum") \
998                 or selfA._toStore("CurrentOptimum") \
999                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1000                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1001                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1002                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1003                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1004             if selfA._toStore("IndexOfOptimum"):
1005                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1006             if selfA._toStore("CurrentOptimum"):
1007                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1008             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1009                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1010             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1011                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1012             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1013                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1014             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1015                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1016         if selfA._toStore("APosterioriCovariance"):
1017             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1018         if selfA._parameters["EstimationOf"] == "Parameters" \
1019             and J < previousJMinimum:
1020             previousJMinimum    = J
1021             XaMin               = Xa
1022             if selfA._toStore("APosterioriCovariance"):
1023                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1024     #
1025     # Stockage final supplémentaire de l'optimum en estimation de paramètres
1026     # ----------------------------------------------------------------------
1027     if selfA._parameters["EstimationOf"] == "Parameters":
1028         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1029         selfA.StoredVariables["Analysis"].store( XaMin )
1030         if selfA._toStore("APosterioriCovariance"):
1031             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1032         if selfA._toStore("BMA"):
1033             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1034     #
1035     return 0
1036
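# Reminder on the unscented transform parameters used above (comment only):
# with L = dim(Xb), the sigma points are X, X +/- Gamma * sqrt(P) columns, where
#   Lambda = Alpha^2 * (L + Kappa) - L,   Gamma = sqrt(L + Lambda),
# mean weights       Wm[0] = Lambda / (L + Lambda),   Wm[i>0] = 1 / (2*(L + Lambda)),
# covariance weights Wc[0] = Wm[0] + (1 - Alpha^2 + Beta),   Wc[i>0] = Wm[i>0].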
1037 # ==============================================================================
1038 def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1039     """
1040     Constrained Extended Kalman Filter
1041     """
1042     if selfA._parameters["EstimationOf"] == "Parameters":
1043         selfA._parameters["StoreInternalVariables"] = True
1044     selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
1045     #
1046     # Opérateurs
1047     H = HO["Direct"].appliedControledFormTo
1048     #
1049     if selfA._parameters["EstimationOf"] == "State":
1050         M = EM["Direct"].appliedControledFormTo
1051     #
1052     if CM is not None and "Tangent" in CM and U is not None:
1053         Cm = CM["Tangent"].asMatrix(Xb)
1054     else:
1055         Cm = None
1056     #
1057     # Durée d'observation et tailles
1058     if hasattr(Y,"stepnumber"):
1059         duration = Y.stepnumber()
1060         __p = numpy.cumprod(Y.shape())[-1]
1061     else:
1062         duration = 2
1063         __p = numpy.array(Y).size
1064     #
1065     # Précalcul des inversions de B et R
1066     if selfA._parameters["StoreInternalVariables"] \
1067         or selfA._toStore("CostFunctionJ") \
1068         or selfA._toStore("CostFunctionJb") \
1069         or selfA._toStore("CostFunctionJo") \
1070         or selfA._toStore("CurrentOptimum") \
1071         or selfA._toStore("APosterioriCovariance"):
1072         BI = B.getI()
1073         RI = R.getI()
1074     #
1075     __n = Xb.size
1076     #
1077     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1078         Xn = Xb
1079         Pn = B
1080         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1081         selfA.StoredVariables["Analysis"].store( Xb )
1082         if selfA._toStore("APosterioriCovariance"):
1083             if hasattr(B,"asfullmatrix"):
1084                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1085             else:
1086                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1087         selfA._setInternalState("seed", numpy.random.get_state())
1088     elif selfA._parameters["nextStep"]:
1089         Xn = selfA._getInternalState("Xn")
1090         Pn = selfA._getInternalState("Pn")
1091     #
1092     if selfA._parameters["EstimationOf"] == "Parameters":
1093         XaMin            = Xn
1094         previousJMinimum = numpy.finfo(float).max
1095     #
     nbPreviousSteps = len(selfA.StoredVariables["CostFunctionJ"]) # previously stored J values, needed for IndexMin below
1096     for step in range(duration-1):
1097         if hasattr(Y,"store"):
1098             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1099         else:
1100             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1101         #
1102         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1103         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1104         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1105         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1106         #
1107         if selfA._parameters["EstimationOf"] == "State":
1108             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1109             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1110             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1111             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1112         #
1113         if U is not None:
1114             if hasattr(U,"store") and len(U)>1:
1115                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1116             elif hasattr(U,"store") and len(U)==1:
1117                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1118             else:
1119                 Un = numpy.ravel( U ).reshape((-1,1))
1120         else:
1121             Un = None
1122         #
1123         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1124             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
1125         #
1126         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1127             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1128             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
1129                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1130                 Xn_predicted = Xn_predicted + Cm * Un
1131             Pn_predicted = Q + Mt * (Pn * Ma)
1132         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1133             # ---> By principle, M = Id, Q = 0
1134             Xn_predicted = Xn
1135             Pn_predicted = Pn
1136         #
1137         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1138             Xn_predicted = ApplyBounds( Xn_predicted, selfA._parameters["Bounds"] )
1139         #
1140         if selfA._parameters["EstimationOf"] == "State":
1141             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1142             _Innovation  = Ynpu - HX_predicted
1143         elif selfA._parameters["EstimationOf"] == "Parameters":
1144             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1145             _Innovation  = Ynpu - HX_predicted
1146             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
1147                 _Innovation = _Innovation - Cm * Un
1148         #
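             # Analysis step of the extended Kalman filter:
             #   K  = Pf Ha (R + Ht Pf Ha)^-1     (gain)
             #   Xa = Xf + K (y - H(Xf))          (state update)
             #   Pa = (I - K Ht) Pf               (covariance update)
             # the analysis is then projected onto the bounds ("EstimateProjection").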
1149         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1150         Xn = Xn_predicted + Kn * _Innovation
1151         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1152         #
1153         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1154             Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
1155         #
1156         Xa = Xn # Reference, not a copy
1157         #--------------------------
1158         selfA._setInternalState("Xn", Xn)
1159         selfA._setInternalState("Pn", Pn)
1160         #--------------------------
1161         #
1162         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1163         # ---> with analysis
1164         selfA.StoredVariables["Analysis"].store( Xa )
1165         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1166             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1167         if selfA._toStore("InnovationAtCurrentAnalysis"):
1168             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1169         # ---> with current state
1170         if selfA._parameters["StoreInternalVariables"] \
1171             or selfA._toStore("CurrentState"):
1172             selfA.StoredVariables["CurrentState"].store( Xn )
1173         if selfA._toStore("ForecastState"):
1174             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1175         if selfA._toStore("ForecastCovariance"):
1176             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1177         if selfA._toStore("BMA"):
1178             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1179         if selfA._toStore("InnovationAtCurrentState"):
1180             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1181         if selfA._toStore("SimulatedObservationAtCurrentState") \
1182             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1183             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1184         # ---> others
1185         if selfA._parameters["StoreInternalVariables"] \
1186             or selfA._toStore("CostFunctionJ") \
1187             or selfA._toStore("CostFunctionJb") \
1188             or selfA._toStore("CostFunctionJo") \
1189             or selfA._toStore("CurrentOptimum") \
1190             or selfA._toStore("APosterioriCovariance"):
1191             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1192             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1193             J   = Jb + Jo
1194             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1195             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1196             selfA.StoredVariables["CostFunctionJ" ].store( J )
1197             #
1198             if selfA._toStore("IndexOfOptimum") \
1199                 or selfA._toStore("CurrentOptimum") \
1200                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1201                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1202                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1203                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1204                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1205             if selfA._toStore("IndexOfOptimum"):
1206                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1207             if selfA._toStore("CurrentOptimum"):
1208                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1209             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1210                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1211             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1212                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1213             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1214                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1215             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1216                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1217         if selfA._toStore("APosterioriCovariance"):
1218             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1219         if selfA._parameters["EstimationOf"] == "Parameters" \
1220             and J < previousJMinimum:
1221             previousJMinimum    = J
1222             XaMin               = Xa
1223             if selfA._toStore("APosterioriCovariance"):
1224                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1225     #
1226     # Final additional storage of the optimum for parameter estimation
1227     # ----------------------------------------------------------------------
1228     if selfA._parameters["EstimationOf"] == "Parameters":
1229         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1230         selfA.StoredVariables["Analysis"].store( XaMin )
1231         if selfA._toStore("APosterioriCovariance"):
1232             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1233         if selfA._toStore("BMA"):
1234             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1235     #
1236     return 0
1237
1238 # ==============================================================================
1239 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
1240     """
1241     Ensemble Kalman Smoother (EnKS), fixed-lag formulation
1242     """
1243     #
1244     # Operators
1245     H = HO["Direct"].appliedControledFormTo
1246     #
1247     if selfA._parameters["EstimationOf"] == "State":
1248         M = EM["Direct"].appliedControledFormTo
1249     #
1250     if CM is not None and "Tangent" in CM and U is not None:
1251         Cm = CM["Tangent"].asMatrix(Xb)
1252     else:
1253         Cm = None
1254     #
1255     # Precompute the inverse square root of R
1256     RIdemi = R.sqrtmI()
1257     #
1258     # Observation duration and sizes
1259     LagL = selfA._parameters["SmootherLagL"]
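     # LagL is the length, in time steps, of the fixed smoothing window: at
     # each new observation the current analysis and the LagL previous ones
     # are corrected with the same ensemble-space weights.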
1260     if (not hasattr(Y,"store")) or (not hasattr(Y,"stepnumber")):
1261         raise ValueError("Fixed-lag smoother requires a series of observations")
1262     if Y.stepnumber() < LagL:
1263         raise ValueError("Fixed-lag smoother requires a series of observations longer than the lag L")
1264     duration = Y.stepnumber()
1265     __p = numpy.cumprod(Y.shape())[-1]
1266     __n = Xb.size
1267     __m = selfA._parameters["NumberOfMembers"]
1268     #
1269     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1270         selfA.StoredVariables["Analysis"].store( Xb )
1271         if selfA._toStore("APosterioriCovariance"):
1272             if hasattr(B,"asfullmatrix"):
1273                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1274             else:
1275                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1276     #
1277     # Initial direct computation (storing is preferred over recomputing)
1278     __seed = numpy.random.get_state()
1279     selfB = copy.deepcopy(selfA)
1280     selfB._parameters["StoreSupplementaryCalculations"] = ["CurrentEnsembleState"]
1281     if VariantM == "EnKS16-KalmanFilterFormula":
1282         etkf(selfB, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM = "KalmanFilterFormula")
1283     else:
1284         raise ValueError("VariantM has to be chosen in the authorized methods list.")
1285     if LagL > 0:
1286         EL  = selfB.StoredVariables["CurrentEnsembleState"][LagL-1]
1287     else:
1288         EL = EnsembleOfBackgroundPerturbations( Xb, None, __m ) # Cf. etkf
1289     selfA._parameters["SetSeed"] = numpy.random.set_state(__seed)
1290     #
1291     for step in range(LagL,duration-1):
1292         #
1293         sEL = selfB.StoredVariables["CurrentEnsembleState"][step+1-LagL:step+1]
1294         sEL.append(None)
1295         #
1296         if hasattr(Y,"store"):
1297             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1298         else:
1299             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1300         #
1301         if U is not None:
1302             if hasattr(U,"store") and len(U)>1:
1303                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1304             elif hasattr(U,"store") and len(U)==1:
1305                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1306             else:
1307                 Un = numpy.ravel( U ).reshape((-1,1))
1308         else:
1309             Un = None
1310         #
1311         #--------------------------
1312         if VariantM == "EnKS16-KalmanFilterFormula":
1313             if selfA._parameters["EstimationOf"] == "State": # Forecast
1314                 EL = M( [(EL[:,i], Un) for i in range(__m)],
1315                     argsAsSerie = True,
1316                     returnSerieAsArrayMatrix = True )
1317                 EL = EnsemblePerturbationWithGivenCovariance( EL, Q )
1318                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1319                     argsAsSerie = True,
1320                     returnSerieAsArrayMatrix = True )
1321                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
1322                     Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1323                     EZ = EZ + Cm * Un
1324             elif selfA._parameters["EstimationOf"] == "Parameters":
1325                 # ---> By principle, M = Id, Q = 0
1326                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
1327                     argsAsSerie = True,
1328                     returnSerieAsArrayMatrix = True )
1329             #
1330             vEm   = EL.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1331             vZm   = EZ.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1332             #
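                 # ETKF update written in ensemble space: mS holds the normalized
                 # anomalies of the predicted observations, delta the normalized
                 # innovation, mT the inverse of the weight-space Hessian, vw the
                 # mean analysis weights, and Tdemi the square-root transform
                 # applied to the anomalies.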
1333             mS    = RIdemi @ EnsembleOfAnomalies( EZ, vZm, 1./math.sqrt(__m-1) )
1334             mS    = mS.reshape((-1,__m)) # For the one-dimensional case
1335             delta = RIdemi @ ( Ynpu - vZm )
1336             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1337             vw    = mT @ mS.T @ delta
1338             #
1339             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1340             mU    = numpy.identity(__m)
1341             wTU   = (vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU)
1342             #
1343             EX    = EnsembleOfAnomalies( EL, vEm, 1./math.sqrt(__m-1) )
1344             EL    = vEm + EX @ wTU
1345             #
1346             sEL[LagL] = EL
1347             for irl in range(LagL): # Smoothing of the L previous analyses
1348                 vEm = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1349                 EX = EnsembleOfAnomalies( sEL[irl], vEm, 1./math.sqrt(__m-1) )
1350                 sEL[irl] = vEm + EX @ wTU
1351             #
1352             # Keep the order-0 retrospective analysis before rotation
1353             Xa = sEL[0].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1354             if selfA._toStore("APosterioriCovariance"):
1355                 EXn = sEL[0]
1356             #
1357             for irl in range(LagL):
1358                 sEL[irl] = sEL[irl+1]
1359             sEL[LagL] = None
1360         #--------------------------
1361         else:
1362             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1363         #
1364         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1365         # ---> with analysis
1366         selfA.StoredVariables["Analysis"].store( Xa )
1367         if selfA._toStore("APosterioriCovariance"):
1368             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(EXn) )
1369     #
1370     # Store the last analyses, which are only incompletely updated
1371     for irl in range(LagL):
1372         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1373         Xa = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1374         selfA.StoredVariables["Analysis"].store( Xa )
1375     #
1376     return 0
1377
1378 # ==============================================================================
1379 def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
1380     """
1381     Ensemble Transform Kalman Filter (ETKF)
1382     """
1383     if selfA._parameters["EstimationOf"] == "Parameters":
1384         selfA._parameters["StoreInternalVariables"] = True
1385     #
1386     # Operators
1387     H = HO["Direct"].appliedControledFormTo
1388     #
1389     if selfA._parameters["EstimationOf"] == "State":
1390         M = EM["Direct"].appliedControledFormTo
1391     #
1392     if CM is not None and "Tangent" in CM and U is not None:
1393         Cm = CM["Tangent"].asMatrix(Xb)
1394     else:
1395         Cm = None
1396     #
1397     # Observation duration and sizes
1398     if hasattr(Y,"stepnumber"):
1399         duration = Y.stepnumber()
1400         __p = numpy.cumprod(Y.shape())[-1]
1401     else:
1402         duration = 2
1403         __p = numpy.array(Y).size
1404     #
1405     # Precompute the inverses of B and R
1406     if selfA._parameters["StoreInternalVariables"] \
1407         or selfA._toStore("CostFunctionJ") \
1408         or selfA._toStore("CostFunctionJb") \
1409         or selfA._toStore("CostFunctionJo") \
1410         or selfA._toStore("CurrentOptimum") \
1411         or selfA._toStore("APosterioriCovariance"):
1412         BI = B.getI()
1413         RI = R.getI()
1414     elif VariantM != "KalmanFilterFormula":
1415         RI = R.getI()
1416     if VariantM == "KalmanFilterFormula":
1417         RIdemi = R.sqrtmI()
1418     #
1419     __n = Xb.size
1420     __m = selfA._parameters["NumberOfMembers"]
1421     #
1422     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1423         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
1424         selfA.StoredVariables["Analysis"].store( Xb )
1425         if selfA._toStore("APosterioriCovariance"):
1426             if hasattr(B,"asfullmatrix"):
1427                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1428             else:
1429                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1430         selfA._setInternalState("seed", numpy.random.get_state())
1431     elif selfA._parameters["nextStep"]:
1432         Xn = selfA._getInternalState("Xn")
1433     #
1434     previousJMinimum = numpy.finfo(float).max
1435     #
     nbPreviousSteps = len(selfA.StoredVariables["CostFunctionJ"]) # previously stored J values, needed for IndexMin below
1436     for step in range(duration-1):
1437         numpy.random.set_state(selfA._getInternalState("seed"))
1438         if hasattr(Y,"store"):
1439             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1440         else:
1441             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1442         #
1443         if U is not None:
1444             if hasattr(U,"store") and len(U)>1:
1445                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1446             elif hasattr(U,"store") and len(U)==1:
1447                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1448             else:
1449                 Un = numpy.ravel( U ).reshape((-1,1))
1450         else:
1451             Un = None
1452         #
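         # Optional multiplicative inflation of the background anomalies, to
         # compensate for the underestimation of spread by a finite ensemble.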
1453         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1454             Xn = CovarianceInflation( Xn,
1455                 selfA._parameters["InflationType"],
1456                 selfA._parameters["InflationFactor"],
1457                 )
1458         #
1459         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1460             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
1461                 argsAsSerie = True,
1462                 returnSerieAsArrayMatrix = True )
1463             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
1464             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1465                 argsAsSerie = True,
1466                 returnSerieAsArrayMatrix = True )
1467             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
1468                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1469                 Xn_predicted = Xn_predicted + Cm * Un
1470         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1471             # ---> By principle, M = Id, Q = 0
1472             Xn_predicted = EMX = Xn
1473             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
1474                 argsAsSerie = True,
1475                 returnSerieAsArrayMatrix = True )
1476         #
1477         # Mean of forecast and observation of forecast
1478         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1479         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1480         #
1481         # Anomalies
1482         EaX   = EnsembleOfAnomalies( Xn_predicted, Xfm )
1483         EaHX  = EnsembleOfAnomalies( HX_predicted, Hfm)
1484         #
1485         #--------------------------
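         # Deterministic ETKF analysis in ensemble space: the mean update comes
         # from the weights vw and the analysis anomalies from the symmetric
         # square-root transform Tdemi, so no observation perturbation is needed.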
1486         if VariantM == "KalmanFilterFormula":
1487             mS    = RIdemi * EaHX / math.sqrt(__m-1)
1488             mS    = mS.reshape((-1,__m)) # For the one-dimensional case
1489             delta = RIdemi * ( Ynpu - Hfm )
1490             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
1491             vw    = mT @ mS.T @ delta
1492             #
1493             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
1494             mU    = numpy.identity(__m)
1495             #
1496             EaX   = EaX / math.sqrt(__m-1)
1497             Xn    = Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU )
1498         #--------------------------
1499         elif VariantM == "Variational":
1500             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
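             # Variational variant: minimize the cost function J(w) in the space
             # of ensemble weights, then build the analysis anomalies from the
             # inverse Hessian Hta of J at the minimum (through EWa below).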
1501             def CostFunction(w):
1502                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1503                 _Jo = 0.5 * _A.T @ (RI * _A)
1504                 _Jb = 0.5 * (__m-1) * w.T @ w
1505                 _J  = _Jo + _Jb
1506                 return float(_J)
1507             def GradientOfCostFunction(w):
1508                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1509                 _GradJo = - EaHX.T @ (RI * _A)
1510                 _GradJb = (__m-1) * w.reshape((__m,1))
1511                 _GradJ  = _GradJo + _GradJb
1512                 return numpy.ravel(_GradJ)
1513             vw = scipy.optimize.fmin_cg(
1514                 f           = CostFunction,
1515                 x0          = numpy.zeros(__m),
1516                 fprime      = GradientOfCostFunction,
1517                 args        = (),
1518                 disp        = False,
1519                 )
1520             #
1521             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1522             Htb = (__m-1) * numpy.identity(__m)
1523             Hta = Hto + Htb
1524             #
1525             Pta = numpy.linalg.inv( Hta )
1526             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1527             #
1528             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1529         #--------------------------
1530         elif VariantM == "FiniteSize11": # Boc2011 gauge
1531             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1532             def CostFunction(w):
1533                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1534                 _Jo = 0.5 * _A.T @ (RI * _A)
1535                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
1536                 _J  = _Jo + _Jb
1537                 return float(_J)
1538             def GradientOfCostFunction(w):
1539                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1540                 _GradJo = - EaHX.T @ (RI * _A)
1541                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1542                 _GradJ  = _GradJo + _GradJb
1543                 return numpy.ravel(_GradJ)
1544             vw = scipy.optimize.fmin_cg(
1545                 f           = CostFunction,
1546                 x0          = numpy.zeros(__m),
1547                 fprime      = GradientOfCostFunction,
1548                 args        = (),
1549                 disp        = False,
1550                 )
1551             #
1552             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1553             Htb = __m * \
1554                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1555                 / (1 + 1/__m + vw.T @ vw)**2
1556             Hta = Hto + Htb
1557             #
1558             Pta = numpy.linalg.inv( Hta )
1559             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1560             #
1561             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1562         #--------------------------
1563         elif VariantM == "FiniteSize15": # Boc2015 gauge
1564             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1565             def CostFunction(w):
1566                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1567                 _Jo = 0.5 * _A.T @ (RI * _A)
1568                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w)
1569                 _J  = _Jo + _Jb
1570                 return float(_J)
1571             def GradientOfCostFunction(w):
1572                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1573                 _GradJo = - EaHX.T @ (RI * _A)
1574                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1575                 _GradJ  = _GradJo + _GradJb
1576                 return numpy.ravel(_GradJ)
1577             vw = scipy.optimize.fmin_cg(
1578                 f           = CostFunction,
1579                 x0          = numpy.zeros(__m),
1580                 fprime      = GradientOfCostFunction,
1581                 args        = (),
1582                 disp        = False,
1583                 )
1584             #
1585             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1586             Htb = (__m+1) * \
1587                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1588                 / (1 + 1/__m + vw.T @ vw)**2
1589             Hta = Hto + Htb
1590             #
1591             Pta = numpy.linalg.inv( Hta )
1592             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1593             #
1594             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1595         #--------------------------
1596         elif VariantM == "FiniteSize16": # Boc2016 gauge
1597             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
1598             def CostFunction(w):
1599                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1600                 _Jo = 0.5 * _A.T @ (RI * _A)
1601                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
1602                 _J  = _Jo + _Jb
1603                 return float(_J)
1604             def GradientOfCostFunction(w):
1605                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1606                 _GradJo = - EaHX.T @ (RI * _A)
1607                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
1608                 _GradJ  = _GradJo + _GradJb
1609                 return numpy.ravel(_GradJ)
1610             vw = scipy.optimize.fmin_cg(
1611                 f           = CostFunction,
1612                 x0          = numpy.zeros(__m),
1613                 fprime      = GradientOfCostFunction,
1614                 args        = (),
1615                 disp        = False,
1616                 )
1617             #
1618             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1619             Htb = ((__m+1) / (__m-1)) * \
1620                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.identity(__m) - 2 * vw @ vw.T / (__m-1) ) \
1621                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
1622             Hta = Hto + Htb
1623             #
1624             Pta = numpy.linalg.inv( Hta )
1625             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Imaginary part ~= 10^-18
1626             #
1627             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1628         #--------------------------
1629         else:
1630             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1631         #
1632         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1633             Xn = CovarianceInflation( Xn,
1634                 selfA._parameters["InflationType"],
1635                 selfA._parameters["InflationFactor"],
1636                 )
1637         #
1638         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1639         #--------------------------
1640         selfA._setInternalState("Xn", Xn)
1641         selfA._setInternalState("seed", numpy.random.get_state())
1642         #--------------------------
1643         #
1644         if selfA._parameters["StoreInternalVariables"] \
1645             or selfA._toStore("CostFunctionJ") \
1646             or selfA._toStore("CostFunctionJb") \
1647             or selfA._toStore("CostFunctionJo") \
1648             or selfA._toStore("APosterioriCovariance") \
1649             or selfA._toStore("InnovationAtCurrentAnalysis") \
1650             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1651             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1652             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1653             _Innovation = Ynpu - _HXa
1654         #
1655         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1656         # ---> with analysis
1657         selfA.StoredVariables["Analysis"].store( Xa )
1658         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1659             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1660         if selfA._toStore("InnovationAtCurrentAnalysis"):
1661             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1662         # ---> with current state
1663         if selfA._parameters["StoreInternalVariables"] \
1664             or selfA._toStore("CurrentState"):
1665             selfA.StoredVariables["CurrentState"].store( Xn )
1666         if selfA._toStore("ForecastState"):
1667             selfA.StoredVariables["ForecastState"].store( EMX )
1668         if selfA._toStore("ForecastCovariance"):
1669             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
1670         if selfA._toStore("BMA"):
1671             selfA.StoredVariables["BMA"].store( EMX - Xa )
1672         if selfA._toStore("InnovationAtCurrentState"):
1673             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
1674         if selfA._toStore("SimulatedObservationAtCurrentState") \
1675             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1676             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1677         # ---> others
1678         if selfA._parameters["StoreInternalVariables"] \
1679             or selfA._toStore("CostFunctionJ") \
1680             or selfA._toStore("CostFunctionJb") \
1681             or selfA._toStore("CostFunctionJo") \
1682             or selfA._toStore("CurrentOptimum") \
1683             or selfA._toStore("APosterioriCovariance"):
1684             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1685             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1686             J   = Jb + Jo
1687             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1688             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1689             selfA.StoredVariables["CostFunctionJ" ].store( J )
1690             #
1691             if selfA._toStore("IndexOfOptimum") \
1692                 or selfA._toStore("CurrentOptimum") \
1693                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1694                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1695                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1696                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1697                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1698             if selfA._toStore("IndexOfOptimum"):
1699                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1700             if selfA._toStore("CurrentOptimum"):
1701                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1702             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1703                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1704             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1705                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1706             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1707                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1708             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1709                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1710         if selfA._toStore("APosterioriCovariance"):
1711             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1712         if selfA._parameters["EstimationOf"] == "Parameters" \
1713             and J < previousJMinimum:
1714             previousJMinimum    = J
1715             XaMin               = Xa
1716             if selfA._toStore("APosterioriCovariance"):
1717                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1718         # ---> For the smoothers
1719         if selfA._toStore("CurrentEnsembleState"):
1720             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1721     #
1722     # Final additional storage of the optimum for parameter estimation
1723     # ----------------------------------------------------------------------
1724     if selfA._parameters["EstimationOf"] == "Parameters":
1725         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1726         selfA.StoredVariables["Analysis"].store( XaMin )
1727         if selfA._toStore("APosterioriCovariance"):
1728             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1729         if selfA._toStore("BMA"):
1730             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1731     #
1732     return 0
1733
1734 # ==============================================================================
1735 def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1736     """
1737     Extended Kalman Filter
1738     """
1739     if selfA._parameters["EstimationOf"] == "Parameters":
1740         selfA._parameters["StoreInternalVariables"] = True
1741     #
1742     # Operators
1743     H = HO["Direct"].appliedControledFormTo
1744     #
1745     if selfA._parameters["EstimationOf"] == "State":
1746         M = EM["Direct"].appliedControledFormTo
1747     #
1748     if CM is not None and "Tangent" in CM and U is not None:
1749         Cm = CM["Tangent"].asMatrix(Xb)
1750     else:
1751         Cm = None
1752     #
1753     # Observation duration and sizes
1754     if hasattr(Y,"stepnumber"):
1755         duration = Y.stepnumber()
1756         __p = numpy.cumprod(Y.shape())[-1]
1757     else:
1758         duration = 2
1759         __p = numpy.array(Y).size
1760     #
1761     # Precompute the inverses of B and R
1762     if selfA._parameters["StoreInternalVariables"] \
1763         or selfA._toStore("CostFunctionJ") \
1764         or selfA._toStore("CostFunctionJb") \
1765         or selfA._toStore("CostFunctionJo") \
1766         or selfA._toStore("CurrentOptimum") \
1767         or selfA._toStore("APosterioriCovariance"):
1768         BI = B.getI()
1769         RI = R.getI()
1770     #
1771     __n = Xb.size
1772     #
1773     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1774         Xn = Xb
1775         Pn = B
1776         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1777         selfA.StoredVariables["Analysis"].store( Xb )
1778         if selfA._toStore("APosterioriCovariance"):
1779             if hasattr(B,"asfullmatrix"):
1780                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1781             else:
1782                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1783         selfA._setInternalState("seed", numpy.random.get_state())
1784     elif selfA._parameters["nextStep"]:
1785         Xn = selfA._getInternalState("Xn")
1786         Pn = selfA._getInternalState("Pn")
1787     #
1788     if selfA._parameters["EstimationOf"] == "Parameters":
1789         XaMin            = Xn
1790         previousJMinimum = numpy.finfo(float).max
1791     #
     nbPreviousSteps = len(selfA.StoredVariables["CostFunctionJ"]) # previously stored J values, needed for IndexMin below
1792     for step in range(duration-1):
1793         if hasattr(Y,"store"):
1794             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1795         else:
1796             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1797         #
1798         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1799         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1800         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1801         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1802         #
1803         if selfA._parameters["EstimationOf"] == "State":
1804             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1805             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1806             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1807             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1808         #
1809         if U is not None:
1810             if hasattr(U,"store") and len(U)>1:
1811                 Un = numpy.ravel( U[step] ).reshape((-1,1))
1812             elif hasattr(U,"store") and len(U)==1:
1813                 Un = numpy.ravel( U[0] ).reshape((-1,1))
1814             else:
1815                 Un = numpy.ravel( U ).reshape((-1,1))
1816         else:
1817             Un = None
1818         #
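         # Forecast step: the state is propagated by the evolution model M and
         # the covariance by its tangent/adjoint, Pf = Mt Pn Ma + Q; in parameter
         # estimation M = Id and Q = 0, so both are left unchanged.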
1819         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1820             Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
1821             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
1822                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1823                 Xn_predicted = Xn_predicted + Cm * Un
1824             Pn_predicted = Q + Mt * (Pn * Ma)
1825         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1826             # ---> By principle, M = Id, Q = 0
1827             Xn_predicted = Xn
1828             Pn_predicted = Pn
1829         #
1830         if selfA._parameters["EstimationOf"] == "State":
1831             HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
1832             _Innovation  = Ynpu - HX_predicted
1833         elif selfA._parameters["EstimationOf"] == "Parameters":
1834             HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
1835             _Innovation  = Ynpu - HX_predicted
1836             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
1837                 _Innovation = _Innovation - Cm * Un
1838         #
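         # Analysis step of the extended Kalman filter, with the gain
         # K = Pf Ha (R + Ht Pf Ha)^-1, the state update Xa = Xf + K innovation,
         # and the covariance update Pa = (I - K Ht) Pf.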
1839         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1840         Xn = Xn_predicted + Kn * _Innovation
1841         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1842         #
1843         Xa = Xn # Reference, not a copy
1844         #--------------------------
1845         selfA._setInternalState("Xn", Xn)
1846         selfA._setInternalState("Pn", Pn)
1847         #--------------------------
1848         #
1849         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1850         # ---> with analysis
1851         selfA.StoredVariables["Analysis"].store( Xa )
1852         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1853             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1854         if selfA._toStore("InnovationAtCurrentAnalysis"):
1855             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1856         # ---> with current state
1857         if selfA._parameters["StoreInternalVariables"] \
1858             or selfA._toStore("CurrentState"):
1859             selfA.StoredVariables["CurrentState"].store( Xn )
1860         if selfA._toStore("ForecastState"):
1861             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1862         if selfA._toStore("ForecastCovariance"):
1863             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1864         if selfA._toStore("BMA"):
1865             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1866         if selfA._toStore("InnovationAtCurrentState"):
1867             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1868         if selfA._toStore("SimulatedObservationAtCurrentState") \
1869             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1870             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1871         # ---> others
1872         if selfA._parameters["StoreInternalVariables"] \
1873             or selfA._toStore("CostFunctionJ") \
1874             or selfA._toStore("CostFunctionJb") \
1875             or selfA._toStore("CostFunctionJo") \
1876             or selfA._toStore("CurrentOptimum") \
1877             or selfA._toStore("APosterioriCovariance"):
1878             Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
1879             Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
1880             J   = Jb + Jo
1881             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1882             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1883             selfA.StoredVariables["CostFunctionJ" ].store( J )
1884             #
1885             if selfA._toStore("IndexOfOptimum") \
1886                 or selfA._toStore("CurrentOptimum") \
1887                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1888                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1889                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1890                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1891                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1892             if selfA._toStore("IndexOfOptimum"):
1893                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1894             if selfA._toStore("CurrentOptimum"):
1895                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1896             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1897                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1898             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1899                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1900             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1901                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1902             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1903                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1904         if selfA._toStore("APosterioriCovariance"):
1905             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1906         if selfA._parameters["EstimationOf"] == "Parameters" \
1907             and J < previousJMinimum:
1908             previousJMinimum    = J
1909             XaMin               = Xa
1910             if selfA._toStore("APosterioriCovariance"):
1911                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1912     #
1913     # Final additional storage of the optimum for parameter estimation
1914     # ----------------------------------------------------------------------
1915     if selfA._parameters["EstimationOf"] == "Parameters":
1916         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1917         selfA.StoredVariables["Analysis"].store( XaMin )
1918         if selfA._toStore("APosterioriCovariance"):
1919             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1920         if selfA._toStore("BMA"):
1921             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1922     #
1923     return 0
1924
1925 # ==============================================================================
1926 def ienkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="IEnKF12",
1927     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1928     """
1929     Iterative Ensemble Kalman Filter (IEnKF)
1930     """
1931     if selfA._parameters["EstimationOf"] == "Parameters":
1932         selfA._parameters["StoreInternalVariables"] = True
1933     #
1934     # Operators
1935     H = HO["Direct"].appliedControledFormTo
1936     #
1937     if selfA._parameters["EstimationOf"] == "State":
1938         M = EM["Direct"].appliedControledFormTo
1939     #
1940     if CM is not None and "Tangent" in CM and U is not None:
1941         Cm = CM["Tangent"].asMatrix(Xb)
1942     else:
1943         Cm = None
1944     #
1945     # Observation duration and sizes
1946     if hasattr(Y,"stepnumber"):
1947         duration = Y.stepnumber()
1948         __p = numpy.cumprod(Y.shape())[-1]
1949     else:
1950         duration = 2
1951         __p = numpy.array(Y).size
1952     #
1953     # Precompute the inverses of B and R
1954     if selfA._parameters["StoreInternalVariables"] \
1955         or selfA._toStore("CostFunctionJ") \
1956         or selfA._toStore("CostFunctionJb") \
1957         or selfA._toStore("CostFunctionJo") \
1958         or selfA._toStore("CurrentOptimum") \
1959         or selfA._toStore("APosterioriCovariance"):
1960         BI = B.getI()
1961     RI = R.getI()
1962     #
1963     __n = Xb.size
1964     __m = selfA._parameters["NumberOfMembers"]
1965     #
1966     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1967         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
1968         else:                         Pn = B
1969         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
1970         selfA.StoredVariables["Analysis"].store( Xb )
1971         if selfA._toStore("APosterioriCovariance"):
1972             if hasattr(B,"asfullmatrix"):
1973                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1974             else:
1975                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1976         selfA._setInternalState("seed", numpy.random.get_state())
1977     elif selfA._parameters["nextStep"]:
1978         Xn = selfA._getInternalState("Xn")
1979     #
1980     previousJMinimum = numpy.finfo(float).max
1981     #
     nbPreviousSteps = len(selfA.StoredVariables["CostFunctionJ"]) # previously stored J values, needed for IndexMin below
1982     for step in range(duration-1):
1983         numpy.random.set_state(selfA._getInternalState("seed"))
1984         if hasattr(Y,"store"):
1985             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1986         else:
1987             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1988         #
1989         if U is not None:
1990             if hasattr(U,"store") and len(U)>1:
1991                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1992             elif hasattr(U,"store") and len(U)==1:
1993                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1994             else:
1995                 Un = numpy.asmatrix(numpy.ravel( U )).T
1996         else:
1997             Un = None
1998         #
1999         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2000             Xn = CovarianceInflation( Xn,
2001                 selfA._parameters["InflationType"],
2002                 selfA._parameters["InflationFactor"],
2003                 )
2004         #
2005         #--------------------------
2006         if VariantM == "IEnKF12":
2007             Xfm = numpy.ravel(Xn.mean(axis=1, dtype=mfp).astype('float'))
2008             EaX = EnsembleOfAnomalies( Xn ) / math.sqrt(__m-1)
2009             __j = 0
2010             Deltaw = 1
2011             if not BnotT:
2012                 Ta  = numpy.identity(__m)
2013             vw  = numpy.zeros(__m)
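             # Gauss-Newton iterations in ensemble-weight space: at each iteration
             # the ensemble is re-centered on the current estimate vx1, propagated
             # and observed again, and the weight increment Deltaw solves the
             # linearized system mH @ Deltaw = -GradJ, until convergence on Deltaw.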
2014             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2015                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2016                 #
2017                 if BnotT:
2018                     E1 = vx1 + _epsilon * EaX
2019                 else:
2020                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2021                 #
2022                 if selfA._parameters["EstimationOf"] == "State": # Forecast + Q
2023                     E2 = M( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2024                         argsAsSerie = True,
2025                         returnSerieAsArrayMatrix = True )
2026                 elif selfA._parameters["EstimationOf"] == "Parameters":
2027                     # ---> By principle, M = Id
2028                     E2 = Xn
2029                 vx2 = E2.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2030                 vy1 = H((vx2, Un)).reshape((__p,1))
2031                 #
2032                 HE2 = H( [(E2[:,i,numpy.newaxis], Un) for i in range(__m)],
2033                     argsAsSerie = True,
2034                     returnSerieAsArrayMatrix = True )
2035                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2036                 #
2037                 if BnotT:
2038                     EaY = (HE2 - vy2) / _epsilon
2039                 else:
2040                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2041                 #
2042                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy1 )))
2043                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2044                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2045                 #
2046                 vw = vw + Deltaw
2047                 #
2048                 if not BnotT:
2049                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2050                 #
2051                 __j = __j + 1
2052             #
2053             A2 = EnsembleOfAnomalies( E2 )
2054             #
2055             if BnotT:
2056                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2057                 A2 = math.sqrt(__m-1) * A2 @ Ta / _epsilon
2058             #
2059             Xn = vx2 + A2
2060         #--------------------------
2061         else:
2062             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2063         #
2064         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2065             Xn = CovarianceInflation( Xn,
2066                 selfA._parameters["InflationType"],
2067                 selfA._parameters["InflationFactor"],
2068                 )
2069         #
2070         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2071         #--------------------------
2072         selfA._setInternalState("Xn", Xn)
2073         selfA._setInternalState("seed", numpy.random.get_state())
2074         #--------------------------
2075         #
2076         if selfA._parameters["StoreInternalVariables"] \
2077             or selfA._toStore("CostFunctionJ") \
2078             or selfA._toStore("CostFunctionJb") \
2079             or selfA._toStore("CostFunctionJo") \
2080             or selfA._toStore("APosterioriCovariance") \
2081             or selfA._toStore("InnovationAtCurrentAnalysis") \
2082             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2083             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2084             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2085             _Innovation = Ynpu - _HXa
2086         #
2087         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2088         # ---> with analysis
2089         selfA.StoredVariables["Analysis"].store( Xa )
2090         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2091             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2092         if selfA._toStore("InnovationAtCurrentAnalysis"):
2093             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2094         # ---> with current state
2095         if selfA._parameters["StoreInternalVariables"] \
2096             or selfA._toStore("CurrentState"):
2097             selfA.StoredVariables["CurrentState"].store( Xn )
2098         if selfA._toStore("ForecastState"):
2099             selfA.StoredVariables["ForecastState"].store( E2 )
2100         if selfA._toStore("ForecastCovariance"):
2101             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(E2) )
2102         if selfA._toStore("BMA"):
2103             selfA.StoredVariables["BMA"].store( E2 - Xa )
2104         if selfA._toStore("InnovationAtCurrentState"):
2105             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2106         if selfA._toStore("SimulatedObservationAtCurrentState") \
2107             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2108             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2109         # ---> others
2110         if selfA._parameters["StoreInternalVariables"] \
2111             or selfA._toStore("CostFunctionJ") \
2112             or selfA._toStore("CostFunctionJb") \
2113             or selfA._toStore("CostFunctionJo") \
2114             or selfA._toStore("CurrentOptimum") \
2115             or selfA._toStore("APosterioriCovariance"):
2116             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2117             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2118             J   = Jb + Jo
2119             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2120             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2121             selfA.StoredVariables["CostFunctionJ" ].store( J )
2122             #
2123             if selfA._toStore("IndexOfOptimum") \
2124                 or selfA._toStore("CurrentOptimum") \
2125                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2126                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2127                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2128                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2129                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2130             if selfA._toStore("IndexOfOptimum"):
2131                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2132             if selfA._toStore("CurrentOptimum"):
2133                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2134             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2135                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2136             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2137                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2138             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2139                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2140             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2141                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2142         if selfA._toStore("APosterioriCovariance"):
2143             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2144         if selfA._parameters["EstimationOf"] == "Parameters" \
2145             and J < previousJMinimum:
2146             previousJMinimum    = J
2147             XaMin               = Xa
2148             if selfA._toStore("APosterioriCovariance"):
2149                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2150         # ---> Pour les smoothers
2151         if selfA._toStore("CurrentEnsembleState"):
2152             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2153     #
2154     # Stockage final supplémentaire de l'optimum en estimation de paramètres
2155     # ----------------------------------------------------------------------
2156     if selfA._parameters["EstimationOf"] == "Parameters":
2157         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2158         selfA.StoredVariables["Analysis"].store( XaMin )
2159         if selfA._toStore("APosterioriCovariance"):
2160             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2161         if selfA._toStore("BMA"):
2162             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2163     #
2164     return 0
2165
2166 # ==============================================================================
2167 def incr3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2168     """
2169     3DVAR incrémental
2170     """
2171     #
2172     # Initialisations
2173     # ---------------
2174     Hm = HO["Direct"].appliedTo
2175     #
2176     BI = B.getI()
2177     RI = R.getI()
2178     #
2179     Xini = selfA._parameters["InitializationPoint"]
2180     #
2181     HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
2182     Innovation = Y - HXb
2183     #
2184     # Outer Loop
2185     # ----------
2186     iOuter = 0
2187     J      = 1./mpr
2188     DeltaJ = 1./mpr
2189     Xr     = Xini.reshape((-1,1))
2190     while abs(DeltaJ) >= selfA._parameters["CostDecrementTolerance"] and iOuter <= selfA._parameters["MaximumNumberOfSteps"]:
2191         #
2192         # Inner Loop
2193         # ----------
2194         Ht = HO["Tangent"].asMatrix(Xr)
2195         Ht = Ht.reshape(Y.size,Xr.size) # ADAO & check shape
2196         #
2197         # Définition de la fonction-coût
2198         # ------------------------------
2199         def CostFunction(dx):
2200             _dX  = numpy.asmatrix(numpy.ravel( dx )).T
2201             if selfA._parameters["StoreInternalVariables"] or \
2202                 selfA._toStore("CurrentState") or \
2203                 selfA._toStore("CurrentOptimum"):
2204                 selfA.StoredVariables["CurrentState"].store( Xb + _dX )
2205             _HdX = Ht * _dX
2206             _HdX = numpy.asmatrix(numpy.ravel( _HdX )).T
2207             _dInnovation = Innovation - _HdX
2208             if selfA._toStore("SimulatedObservationAtCurrentState") or \
2209                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2210                 selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb + _HdX )
2211             if selfA._toStore("InnovationAtCurrentState"):
2212                 selfA.StoredVariables["InnovationAtCurrentState"].store( _dInnovation )
2213             #
2214             Jb  = float( 0.5 * _dX.T * BI * _dX )
2215             Jo  = float( 0.5 * _dInnovation.T * RI * _dInnovation )
2216             J   = Jb + Jo
2217             #
2218             selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2219             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2220             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2221             selfA.StoredVariables["CostFunctionJ" ].store( J )
2222             if selfA._toStore("IndexOfOptimum") or \
2223                 selfA._toStore("CurrentOptimum") or \
2224                 selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2225                 selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2226                 selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2227                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2228                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2229             if selfA._toStore("IndexOfOptimum"):
2230                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2231             if selfA._toStore("CurrentOptimum"):
2232                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2233             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2234                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2235             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2236                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2237             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2238                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2239             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2240                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2241             return J
2242         #
2243         def GradientOfCostFunction(dx):
2244             _dX          = numpy.asmatrix(numpy.ravel( dx )).T
2245             _HdX         = Ht * _dX
2246             _HdX         = numpy.asmatrix(numpy.ravel( _HdX )).T
2247             _dInnovation = Innovation - _HdX
2248             GradJb       = BI * _dX
2249             GradJo       = - Ht.T @ (RI * _dInnovation)
2250             GradJ        = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2251             return GradJ
2252         #
2253         # Minimisation de la fonctionnelle
2254         # --------------------------------
2255         nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2256         #
2257         if selfA._parameters["Minimizer"] == "LBFGSB":
2258             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
2259             if "0.19" <= scipy.version.version <= "1.1.0":
2260                 import lbfgsbhlt as optimiseur
2261             else:
2262                 import scipy.optimize as optimiseur
2263             Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2264                 func        = CostFunction,
2265                 x0          = numpy.zeros(Xini.size),
2266                 fprime      = GradientOfCostFunction,
2267                 args        = (),
2268                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2269                 maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2270                 factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2271                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2272                 iprint      = selfA._parameters["optiprint"],
2273                 )
2274             nfeval = Informations['funcalls']
2275             rc     = Informations['warnflag']
2276         elif selfA._parameters["Minimizer"] == "TNC":
2277             Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2278                 func        = CostFunction,
2279                 x0          = numpy.zeros(Xini.size),
2280                 fprime      = GradientOfCostFunction,
2281                 args        = (),
2282                 bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2283                 maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2284                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2285                 ftol        = selfA._parameters["CostDecrementTolerance"],
2286                 messages    = selfA._parameters["optmessages"],
2287                 )
2288         elif selfA._parameters["Minimizer"] == "CG":
2289             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2290                 f           = CostFunction,
2291                 x0          = numpy.zeros(Xini.size),
2292                 fprime      = GradientOfCostFunction,
2293                 args        = (),
2294                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2295                 gtol        = selfA._parameters["GradientNormTolerance"],
2296                 disp        = selfA._parameters["optdisp"],
2297                 full_output = True,
2298                 )
2299         elif selfA._parameters["Minimizer"] == "NCG":
2300             Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2301                 f           = CostFunction,
2302                 x0          = numpy.zeros(Xini.size),
2303                 fprime      = GradientOfCostFunction,
2304                 args        = (),
2305                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2306                 avextol     = selfA._parameters["CostDecrementTolerance"],
2307                 disp        = selfA._parameters["optdisp"],
2308                 full_output = True,
2309                 )
2310         elif selfA._parameters["Minimizer"] == "BFGS":
2311             Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2312                 f           = CostFunction,
2313                 x0          = numpy.zeros(Xini.size),
2314                 fprime      = GradientOfCostFunction,
2315                 args        = (),
2316                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2317                 gtol        = selfA._parameters["GradientNormTolerance"],
2318                 disp        = selfA._parameters["optdisp"],
2319                 full_output = True,
2320                 )
2321         else:
2322             raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2323         #
2324         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2325         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2326         #
2327         if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2328             Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2329         else:
2330             Minimum = Xb + Minimum.reshape((-1,1))
2331         #
2332         Xr     = Minimum
2333         DeltaJ = selfA.StoredVariables["CostFunctionJ" ][-1] - J ; J = selfA.StoredVariables["CostFunctionJ" ][-1]
2334         iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
2335     #
2336     Xa = Xr
2337     #--------------------------
2338     #
2339     selfA.StoredVariables["Analysis"].store( Xa )
2340     #
2341     if selfA._toStore("OMA") or \
2342         selfA._toStore("SigmaObs2") or \
2343         selfA._toStore("SimulationQuantiles") or \
2344         selfA._toStore("SimulatedObservationAtOptimum"):
2345         if selfA._toStore("SimulatedObservationAtCurrentState"):
2346             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2347         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2348             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2349         else:
2350             HXa = Hm( Xa )
2351     #
2352     if selfA._toStore("APosterioriCovariance") or \
2353         selfA._toStore("SimulationQuantiles") or \
2354         selfA._toStore("JacobianMatrixAtOptimum") or \
2355         selfA._toStore("KalmanGainAtOptimum"):
2356         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2357         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2358     if selfA._toStore("APosterioriCovariance") or \
2359         selfA._toStore("SimulationQuantiles") or \
2360         selfA._toStore("KalmanGainAtOptimum"):
2361         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2362         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2363     if selfA._toStore("APosterioriCovariance") or \
2364         selfA._toStore("SimulationQuantiles"):
2365         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
2366     if selfA._toStore("APosterioriCovariance"):
2367         selfA.StoredVariables["APosterioriCovariance"].store( A )
2368     if selfA._toStore("JacobianMatrixAtOptimum"):
2369         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2370     if selfA._toStore("KalmanGainAtOptimum"):
2371         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2372         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2373         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2374     #
2375     # Calculs et/ou stockages supplémentaires
2376     # ---------------------------------------
2377     if selfA._toStore("Innovation") or \
2378         selfA._toStore("SigmaObs2") or \
2379         selfA._toStore("MahalanobisConsistency") or \
2380         selfA._toStore("OMB"):
2381         d  = Y - HXb
2382     if selfA._toStore("Innovation"):
2383         selfA.StoredVariables["Innovation"].store( d )
2384     if selfA._toStore("BMA"):
2385         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2386     if selfA._toStore("OMA"):
2387         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2388     if selfA._toStore("OMB"):
2389         selfA.StoredVariables["OMB"].store( d )
2390     if selfA._toStore("SigmaObs2"):
2391         TraceR = R.trace(Y.size)
2392         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
2393     if selfA._toStore("MahalanobisConsistency"):
2394         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2395     if selfA._toStore("SimulationQuantiles"):
2396         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2397     if selfA._toStore("SimulatedObservationAtBackground"):
2398         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
2399     if selfA._toStore("SimulatedObservationAtOptimum"):
2400         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
2401     #
2402     return 0
2403
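# ==============================================================================
# Illustrative, self-contained sketch (not used anywhere by ADAO): the
# outer/inner loop principle of the incremental 3DVAR implemented above, on a
# small *linear* problem where the inner quadratic subproblem is solved exactly
# by the normal equations instead of L-BFGS-B/TNC/CG. The name
# "_sketchIncremental3DVAR" and the dense-array arguments (Hlin, B, R as numpy
# arrays) are assumptions made only for this sketch.
def _sketchIncremental3DVAR(xb, y, Hlin, B, R, nbOuter=3):
    "Schematic outer (re-linearisation) / inner (quadratic) loops of incremental 3DVAR"
    xr = numpy.ravel(xb).astype(float)
    BI = numpy.linalg.inv(B)
    RI = numpy.linalg.inv(R)
    for _ in range(nbOuter):
        # Inner loop: exact minimisation of the quadratic cost in the increment dx
        d  = numpy.ravel(y) - Hlin @ xr                      # innovation at the current outer iterate
        A  = BI + Hlin.T @ RI @ Hlin                         # Hessian of J(dx)
        b  = Hlin.T @ RI @ d - BI @ (xr - numpy.ravel(xb))   # right-hand side of the normal equations
        dx = numpy.linalg.solve(A, b)
        xr = xr + dx                                         # outer loop update of the linearisation point
    return xr.reshape((-1,1))
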
2404 # ==============================================================================
2405 def mlef(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="MLEF13",
2406     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
2407     """
2408     Maximum Likelihood Ensemble Filter
2409     """
2410     if selfA._parameters["EstimationOf"] == "Parameters":
2411         selfA._parameters["StoreInternalVariables"] = True
2412     #
2413     # Opérateurs
2414     H = HO["Direct"].appliedControledFormTo
2415     #
2416     if selfA._parameters["EstimationOf"] == "State":
2417         M = EM["Direct"].appliedControledFormTo
2418     #
2419     if CM is not None and "Tangent" in CM and U is not None:
2420         Cm = CM["Tangent"].asMatrix(Xb)
2421     else:
2422         Cm = None
2423     #
2424     # Durée d'observation et tailles
2425     if hasattr(Y,"stepnumber"):
2426         duration = Y.stepnumber()
2427         __p = numpy.cumprod(Y.shape())[-1]
2428     else:
2429         duration = 2
2430         __p = numpy.array(Y).size
2431     #
2432     # Précalcul des inversions de B et R
2433     if selfA._parameters["StoreInternalVariables"] \
2434         or selfA._toStore("CostFunctionJ") \
2435         or selfA._toStore("CostFunctionJb") \
2436         or selfA._toStore("CostFunctionJo") \
2437         or selfA._toStore("CurrentOptimum") \
2438         or selfA._toStore("APosterioriCovariance"):
2439         BI = B.getI()
2440     RI = R.getI()
2441     #
2442     __n = Xb.size
2443     __m = selfA._parameters["NumberOfMembers"]
2444     #
2445     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2446         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
2447         selfA.StoredVariables["Analysis"].store( Xb )
2448         if selfA._toStore("APosterioriCovariance"):
2449             if hasattr(B,"asfullmatrix"):
2450                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2451             else:
2452                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2453         selfA._setInternalState("seed", numpy.random.get_state())
2454     elif selfA._parameters["nextStep"]:
2455         Xn = selfA._getInternalState("Xn")
2456     #
2457     previousJMinimum = numpy.finfo(float).max
2458     #
2459     for step in range(duration-1):
2460         numpy.random.set_state(selfA._getInternalState("seed"))
2461         if hasattr(Y,"store"):
2462             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2463         else:
2464             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2465         #
2466         if U is not None:
2467             if hasattr(U,"store") and len(U)>1:
2468                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2469             elif hasattr(U,"store") and len(U)==1:
2470                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2471             else:
2472                 Un = numpy.asmatrix(numpy.ravel( U )).T
2473         else:
2474             Un = None
2475         #
2476         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2477             Xn = CovarianceInflation( Xn,
2478                 selfA._parameters["InflationType"],
2479                 selfA._parameters["InflationFactor"],
2480                 )
2481         #
2482         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2483             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2484                 argsAsSerie = True,
2485                 returnSerieAsArrayMatrix = True )
2486             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2487             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
2488                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2489                 Xn_predicted = Xn_predicted + Cm * Un
2490         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2491             # --- > Par principe, M = Id, Q = 0
2492             Xn_predicted = EMX = Xn
2493         #
2494         #--------------------------
2495         if VariantM == "MLEF13":
2496             Xfm = numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))
2497             EaX = EnsembleOfAnomalies( Xn_predicted, Xfm, 1./math.sqrt(__m-1) )
2498             Ua  = numpy.identity(__m)
2499             __j = 0
2500             Deltaw = 1
2501             if not BnotT:
2502                 Ta  = numpy.identity(__m)
2503             vw  = numpy.zeros(__m)
2504             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2505                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2506                 #
2507                 if BnotT:
2508                     E1 = vx1 + _epsilon * EaX
2509                 else:
2510                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2511                 #
2512                 HE2 = H( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2513                     argsAsSerie = True,
2514                     returnSerieAsArrayMatrix = True )
2515                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2516                 #
2517                 if BnotT:
2518                     EaY = (HE2 - vy2) / _epsilon
2519                 else:
2520                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2521                 #
2522                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy2 )))
2523                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2524                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2525                 #
2526                 vw = vw + Deltaw
2527                 #
2528                 if not BnotT:
2529                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2530                 #
2531                 __j = __j + 1
2532             #
2533             if BnotT:
2534                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2535             #
2536             Xn = vx1 + math.sqrt(__m-1) * EaX @ Ta @ Ua
2537         #--------------------------
2538         else:
2539             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2540         #
2541         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2542             Xn = CovarianceInflation( Xn,
2543                 selfA._parameters["InflationType"],
2544                 selfA._parameters["InflationFactor"],
2545                 )
2546         #
2547         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2548         #--------------------------
2549         selfA._setInternalState("Xn", Xn)
2550         selfA._setInternalState("seed", numpy.random.get_state())
2551         #--------------------------
2552         #
2553         if selfA._parameters["StoreInternalVariables"] \
2554             or selfA._toStore("CostFunctionJ") \
2555             or selfA._toStore("CostFunctionJb") \
2556             or selfA._toStore("CostFunctionJo") \
2557             or selfA._toStore("APosterioriCovariance") \
2558             or selfA._toStore("InnovationAtCurrentAnalysis") \
2559             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2560             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2561             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2562             _Innovation = Ynpu - _HXa
2563         #
2564         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2565         # ---> avec analysis
2566         selfA.StoredVariables["Analysis"].store( Xa )
2567         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2568             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2569         if selfA._toStore("InnovationAtCurrentAnalysis"):
2570             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2571         # ---> avec current state
2572         if selfA._parameters["StoreInternalVariables"] \
2573             or selfA._toStore("CurrentState"):
2574             selfA.StoredVariables["CurrentState"].store( Xn )
2575         if selfA._toStore("ForecastState"):
2576             selfA.StoredVariables["ForecastState"].store( EMX )
2577         if selfA._toStore("ForecastCovariance"):
2578             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2579         if selfA._toStore("BMA"):
2580             selfA.StoredVariables["BMA"].store( EMX - Xa )
2581         if selfA._toStore("InnovationAtCurrentState"):
2582             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2583         if selfA._toStore("SimulatedObservationAtCurrentState") \
2584             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2585             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2586         # ---> autres
2587         if selfA._parameters["StoreInternalVariables"] \
2588             or selfA._toStore("CostFunctionJ") \
2589             or selfA._toStore("CostFunctionJb") \
2590             or selfA._toStore("CostFunctionJo") \
2591             or selfA._toStore("CurrentOptimum") \
2592             or selfA._toStore("APosterioriCovariance"):
2593             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2594             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2595             J   = Jb + Jo
2596             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2597             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2598             selfA.StoredVariables["CostFunctionJ" ].store( J )
2599             #
2600             if selfA._toStore("IndexOfOptimum") \
2601                 or selfA._toStore("CurrentOptimum") \
2602                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2603                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2604                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2605                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2606                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2607             if selfA._toStore("IndexOfOptimum"):
2608                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2609             if selfA._toStore("CurrentOptimum"):
2610                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2611             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2612                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2613             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2614                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2615             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2616                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2617             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2618                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2619         if selfA._toStore("APosterioriCovariance"):
2620             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2621         if selfA._parameters["EstimationOf"] == "Parameters" \
2622             and J < previousJMinimum:
2623             previousJMinimum    = J
2624             XaMin               = Xa
2625             if selfA._toStore("APosterioriCovariance"):
2626                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2627         # ---> Pour les smoothers
2628         if selfA._toStore("CurrentEnsembleState"):
2629             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
2630     #
2631     # Stockage final supplémentaire de l'optimum en estimation de paramètres
2632     # ----------------------------------------------------------------------
2633     if selfA._parameters["EstimationOf"] == "Parameters":
2634         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2635         selfA.StoredVariables["Analysis"].store( XaMin )
2636         if selfA._toStore("APosterioriCovariance"):
2637             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2638         if selfA._toStore("BMA"):
2639             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2640     #
2641     return 0
2642
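# ==============================================================================
# Illustrative, self-contained sketch (not used anywhere by ADAO): the
# ensemble-space Newton iteration underlying the MLEF analysis above, written
# for a *linear* observation operator so that the observation anomalies are
# exact. The name "_sketchMLEFAnalysis" and the dense-array arguments (Hlin, R
# as numpy arrays) are assumptions made only for this sketch.
def _sketchMLEFAnalysis(E, y, Hlin, R, maxIter=10, tol=1.e-8):
    "One MLEF-like analysis of the ensemble E (n x m members) against the observation y"
    n, m = E.shape
    RI   = numpy.linalg.inv(R)
    xm   = E.mean(axis=1)
    EaX  = (E - xm.reshape((n,1))) / math.sqrt(m-1)          # state anomalies
    EaY  = Hlin @ EaX                                        # observation anomalies
    w    = numpy.zeros(m)
    for _ in range(maxIter):
        d     = numpy.ravel(y) - Hlin @ (xm + EaX @ w)       # innovation at the current iterate
        gradJ = w - EaY.T @ (RI @ d)                         # gradient of J in ensemble space
        hessJ = numpy.identity(m) + EaY.T @ RI @ EaY         # Gauss-Newton Hessian
        dw    = numpy.linalg.solve(hessJ, -gradJ)
        w     = w + dw
        if numpy.linalg.norm(dw) < tol:
            break
    # Square-root update of the anomalies: Ta = hessJ^(-1/2), via eigendecomposition
    evals, evecs = numpy.linalg.eigh(hessJ)
    Ta = evecs @ numpy.diag(1./numpy.sqrt(evals)) @ evecs.T
    return (xm + EaX @ w).reshape((n,1)) + math.sqrt(m-1) * EaX @ Ta
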
2643 # ==============================================================================
2644 def mmqr(
2645         func     = None,
2646         x0       = None,
2647         fprime   = None,
2648         bounds   = None,
2649         quantile = 0.5,
2650         maxfun   = 15000,
2651         toler    = 1.e-06,
2652         y        = None,
2653         ):
2654     """
2655     Implémentation informatique de l'algorithme MMQR, basée sur la publication :
2656     David R. Hunter, Kenneth Lange, "Quantile Regression via an MM Algorithm",
2657     Journal of Computational and Graphical Statistics, 9, 1, pp.60-77, 2000.
2658     """
2659     #
2660     # Recuperation des donnees et informations initiales
2661     # Récupération des données et informations initiales
2662     variables = numpy.ravel( x0 )
2663     mesures   = numpy.ravel( y )
2664     increment = sys.float_info.max
2665     p         = variables.size
2666     n         = mesures.size
2667     quantile  = float(quantile)
2668     #
2669     # Calcul des paramètres du MM
2670     # ---------------------------
2671     tn      = float(toler) / n
2672     e0      = -tn / math.log(tn)
2673     epsilon = (e0-tn)/(1+math.log(e0))
2674     #
2675     # Calculs d'initialisation
2676     # ------------------------
2677     residus  = mesures - numpy.ravel( func( variables ) )
2678     poids    = 1./(epsilon+numpy.abs(residus))
2679     veps     = 1. - 2. * quantile - residus * poids
2680     lastsurrogate = -numpy.sum(residus*veps) - (1.-2.*quantile)*numpy.sum(residus)
2681     iteration = 0
2682     #
2683     # Recherche itérative
2684     # -------------------
2685     while (increment > toler) and (iteration < maxfun) :
2686         iteration += 1
2687         #
2688         Derivees  = numpy.array(fprime(variables))
2689         Derivees  = Derivees.reshape(n,p) # Nécessaire pour remettre en place la matrice si elle passe par des tuyaux YACS
2690         DeriveesT = Derivees.transpose()
2691         M         =   numpy.dot( DeriveesT , (poids.reshape((n,1)) * Derivees) )
2692         SM        =   numpy.transpose(numpy.dot( DeriveesT , veps ))
2693         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
2694         #
2695         variables = variables + step
2696         if bounds is not None:
2697             # Attention : boucle infinie à éviter si un intervalle est trop petit
2698             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
2699                 step      = step/2.
2700                 variables = variables - step
2701         residus   = mesures - numpy.ravel( func(variables) )
2702         surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2703         #
2704         while ( (surrogate > lastsurrogate) and ( max(list(numpy.abs(step))) > 1.e-16 ) ) :
2705             step      = step/2.
2706             variables = variables - step
2707             residus   = mesures - numpy.ravel( func(variables) )
2708             surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2709         #
2710         increment     = lastsurrogate-surrogate
2711         poids         = 1./(epsilon+numpy.abs(residus))
2712         veps          = 1. - 2. * quantile - residus * poids
2713         lastsurrogate = -numpy.sum(residus * veps) - (1.-2.*quantile)*numpy.sum(residus)
2714     #
2715     # Mesure d'écart
2716     # --------------
2717     Ecart = quantile * numpy.sum(residus) - numpy.sum( residus[residus<0] )
2718     #
2719     return variables, Ecart, [n,p,iteration,increment,0]
2720
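# ==============================================================================
# Hypothetical usage sketch of mmqr (defined above), kept as an inert helper
# that is never called at import time: a median regression (quantile = 0.5) of
# synthetic measurements on an affine model. The names "_sketchMMQRUsage",
# "beta", "x" and "y" are illustrative only.
def _sketchMMQRUsage():
    "Fit y ~ beta[0]*x + beta[1] at the median with the MM algorithm of mmqr"
    x = numpy.linspace(0., 1., 50)
    y = 2. * x + 1.                                                    # synthetic measurements
    func   = lambda beta: beta[0] * x + beta[1]                        # model evaluation
    fprime = lambda beta: numpy.column_stack((x, numpy.ones_like(x)))  # Jacobian (n x p)
    beta, ecart, infos = mmqr(
        func     = func,
        x0       = numpy.array([0., 0.]),
        fprime   = fprime,
        quantile = 0.5,
        y        = y,
        )
    return beta, ecart
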
2721 # ==============================================================================
2722 def multi3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
2723     """
2724     3DVAR multi-pas et multi-méthodes
2725     """
2726     #
2727     # Initialisation
2728     # --------------
2729     if selfA._parameters["EstimationOf"] == "State":
2730         M = EM["Direct"].appliedTo
2731         #
2732         if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2733             Xn = numpy.ravel(Xb).reshape((-1,1))
2734             selfA.StoredVariables["Analysis"].store( Xn )
2735             if selfA._toStore("APosterioriCovariance"):
2736                 if hasattr(B,"asfullmatrix"):
2737                     selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
2738                 else:
2739                     selfA.StoredVariables["APosterioriCovariance"].store( B )
2740             if selfA._toStore("ForecastState"):
2741                 selfA.StoredVariables["ForecastState"].store( Xn )
2742         elif selfA._parameters["nextStep"]:
2743             Xn = selfA._getInternalState("Xn")
2744     else:
2745         Xn = numpy.ravel(Xb).reshape((-1,1))
2746     #
2747     if hasattr(Y,"stepnumber"):
2748         duration = Y.stepnumber()
2749     else:
2750         duration = 2
2751     #
2752     # Multi-pas
2753     for step in range(duration-1):
2754         if hasattr(Y,"store"):
2755             Ynpu = numpy.ravel( Y[step+1] ).reshape((-1,1))
2756         else:
2757             Ynpu = numpy.ravel( Y ).reshape((-1,1))
2758         #
2759         if selfA._parameters["EstimationOf"] == "State": # Forecast
2760             Xn_predicted = M( Xn )
2761             if selfA._toStore("ForecastState"):
2762                 selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2763         elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
2764             # --- > Par principe, M = Id, Q = 0
2765             Xn_predicted = Xn
2766         Xn_predicted = numpy.ravel(Xn_predicted).reshape((-1,1))
2767         #
2768         oneCycle(selfA, Xn_predicted, Ynpu, U, HO, None, None, R, B, None)
2769         #
2770         Xn = selfA.StoredVariables["Analysis"][-1]
2771         #--------------------------
2772         selfA._setInternalState("Xn", Xn)
2773     #
2774     return 0
2775
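# ==============================================================================
# Schematic sketch (independent of the ADAO objects) of the multi-step pattern
# used by multi3dvar above: propagate the previous analysis with the evolution
# model, then hand the forecast and the new observation window to a one-step
# analysis. The callables "evolutionModel" and "oneStepAnalysis" and the name
# "_sketchMultiStepDriver" are placeholders for this sketch only.
def _sketchMultiStepDriver(xb, observations, evolutionModel, oneStepAnalysis):
    "Return the list of analyses, one per observation window"
    xn, analyses = numpy.ravel(xb), []
    for y in observations:
        xf = numpy.ravel( evolutionModel(xn) )     # forecast of the previous analysis
        xn = numpy.ravel( oneStepAnalysis(xf, y) ) # one-step analysis against the new window
        analyses.append( xn )
    return analyses
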
2776 # ==============================================================================
2777 def psas3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2778     """
2779     3DVAR PSAS
2780     """
2781     #
2782     # Initialisations
2783     # ---------------
2784     Hm = HO["Direct"].appliedTo
2785     #
2786     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2787         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2788     else:
2789         HXb = Hm( Xb )
2790     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2791     if Y.size != HXb.size:
2792         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
2793     if max(Y.shape) != max(HXb.shape):
2794         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
2795     #
2796     if selfA._toStore("JacobianMatrixAtBackground"):
2797         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2798         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2799         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2800     #
2801     Ht = HO["Tangent"].asMatrix(Xb)
2802     BHT = B * Ht.T
2803     HBHTpR = R + Ht * BHT
2804     Innovation = Y - HXb
2805     #
2806     Xini = numpy.zeros(Xb.shape)
2807     #
2808     # Définition de la fonction-coût
2809     # ------------------------------
2810     def CostFunction(w):
2811         _W = w.reshape((-1,1))
2812         if selfA._parameters["StoreInternalVariables"] or \
2813             selfA._toStore("CurrentState") or \
2814             selfA._toStore("CurrentOptimum"):
2815             selfA.StoredVariables["CurrentState"].store( Xb + BHT @ _W )
2816         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2817             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2818             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm( Xb + BHT @ _W ) )
2819         if selfA._toStore("InnovationAtCurrentState"):
2820             selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
2821         #
2822         Jb  = float( 0.5 * _W.T @ (HBHTpR @ _W) )
2823         Jo  = float( - _W.T @ Innovation )
2824         J   = Jb + Jo
2825         #
2826         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2827         selfA.StoredVariables["CostFunctionJb"].store( Jb )
2828         selfA.StoredVariables["CostFunctionJo"].store( Jo )
2829         selfA.StoredVariables["CostFunctionJ" ].store( J )
2830         if selfA._toStore("IndexOfOptimum") or \
2831             selfA._toStore("CurrentOptimum") or \
2832             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2833             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2834             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2835             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2836             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2837         if selfA._toStore("IndexOfOptimum"):
2838             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2839         if selfA._toStore("CurrentOptimum"):
2840             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2841         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2842             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2843         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2844             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2845         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2846             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2847         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2848             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2849         return J
2850     #
2851     def GradientOfCostFunction(w):
2852         _W = w.reshape((-1,1))
2853         GradJb  = HBHTpR @ _W
2854         GradJo  = - Innovation
2855         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2856         return GradJ
2857     #
2858     # Minimisation de la fonctionnelle
2859     # --------------------------------
2860     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2861     #
2862     if selfA._parameters["Minimizer"] == "LBFGSB":
2863         if "0.19" <= scipy.version.version <= "1.1.0":
2864             import lbfgsbhlt as optimiseur
2865         else:
2866             import scipy.optimize as optimiseur
2867         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2868             func        = CostFunction,
2869             x0          = Xini,
2870             fprime      = GradientOfCostFunction,
2871             args        = (),
2872             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2873             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2874             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2875             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2876             iprint      = selfA._parameters["optiprint"],
2877             )
2878         nfeval = Informations['funcalls']
2879         rc     = Informations['warnflag']
2880     elif selfA._parameters["Minimizer"] == "TNC":
2881         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2882             func        = CostFunction,
2883             x0          = Xini,
2884             fprime      = GradientOfCostFunction,
2885             args        = (),
2886             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
2887             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2888             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2889             ftol        = selfA._parameters["CostDecrementTolerance"],
2890             messages    = selfA._parameters["optmessages"],
2891             )
2892     elif selfA._parameters["Minimizer"] == "CG":
2893         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2894             f           = CostFunction,
2895             x0          = Xini,
2896             fprime      = GradientOfCostFunction,
2897             args        = (),
2898             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2899             gtol        = selfA._parameters["GradientNormTolerance"],
2900             disp        = selfA._parameters["optdisp"],
2901             full_output = True,
2902             )
2903     elif selfA._parameters["Minimizer"] == "NCG":
2904         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2905             f           = CostFunction,
2906             x0          = Xini,
2907             fprime      = GradientOfCostFunction,
2908             args        = (),
2909             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2910             avextol     = selfA._parameters["CostDecrementTolerance"],
2911             disp        = selfA._parameters["optdisp"],
2912             full_output = True,
2913             )
2914     elif selfA._parameters["Minimizer"] == "BFGS":
2915         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2916             f           = CostFunction,
2917             x0          = Xini,
2918             fprime      = GradientOfCostFunction,
2919             args        = (),
2920             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2921             gtol        = selfA._parameters["GradientNormTolerance"],
2922             disp        = selfA._parameters["optdisp"],
2923             full_output = True,
2924             )
2925     else:
2926         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2927     #
2928     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2929     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2930     #
2931     # Correction pour pallier un bug de TNC sur le retour du Minimum
2932     # ----------------------------------------------------------------
2933     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2934         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2935     else:
2936         Minimum = Xb + BHT @ Minimum.reshape((-1,1))
2937     #
2938     Xa = Minimum
2939     #--------------------------
2940     #
2941     selfA.StoredVariables["Analysis"].store( Xa )
2942     #
2943     if selfA._toStore("OMA") or \
2944         selfA._toStore("SigmaObs2") or \
2945         selfA._toStore("SimulationQuantiles") or \
2946         selfA._toStore("SimulatedObservationAtOptimum"):
2947         if selfA._toStore("SimulatedObservationAtCurrentState"):
2948             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2949         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2950             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2951         else:
2952             HXa = Hm( Xa )
2953     #
2954     if selfA._toStore("APosterioriCovariance") or \
2955         selfA._toStore("SimulationQuantiles") or \
2956         selfA._toStore("JacobianMatrixAtOptimum") or \
2957         selfA._toStore("KalmanGainAtOptimum"):
2958         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2959         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2960     if selfA._toStore("APosterioriCovariance") or \
2961         selfA._toStore("SimulationQuantiles") or \
2962         selfA._toStore("KalmanGainAtOptimum"):
2963         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2964         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2965     if selfA._toStore("APosterioriCovariance") or \
2966         selfA._toStore("SimulationQuantiles"):
2967         BI = B.getI()
2968         RI = R.getI()
2969         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
2970     if selfA._toStore("APosterioriCovariance"):
2971         selfA.StoredVariables["APosterioriCovariance"].store( A )
2972     if selfA._toStore("JacobianMatrixAtOptimum"):
2973         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2974     if selfA._toStore("KalmanGainAtOptimum"):
2975         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2976         elif (Y.size >  Xb.size): KG = (B.getI() + numpy.dot(HaM, R.getI() * HtM)).I * HaM * R.getI()
2977         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2978     #
2979     # Calculs et/ou stockages supplémentaires
2980     # ---------------------------------------
2981     if selfA._toStore("Innovation") or \
2982         selfA._toStore("SigmaObs2") or \
2983         selfA._toStore("MahalanobisConsistency") or \
2984         selfA._toStore("OMB"):
2985         d  = Y - HXb
2986     if selfA._toStore("Innovation"):
2987         selfA.StoredVariables["Innovation"].store( d )
2988     if selfA._toStore("BMA"):
2989         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2990     if selfA._toStore("OMA"):
2991         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2992     if selfA._toStore("OMB"):
2993         selfA.StoredVariables["OMB"].store( d )
2994     if selfA._toStore("SigmaObs2"):
2995         TraceR = R.trace(Y.size)
2996         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
2997     if selfA._toStore("MahalanobisConsistency"):
2998         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2999     if selfA._toStore("SimulationQuantiles"):
3000         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3001     if selfA._toStore("SimulatedObservationAtBackground"):
3002         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3003     if selfA._toStore("SimulatedObservationAtOptimum"):
3004         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3005     #
3006     return 0
3007
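# ==============================================================================
# Illustrative, self-contained sketch (not used anywhere by ADAO): the PSAS
# analysis above, specialised to a *linear* observation operator, where the
# dual problem in observation space is solved exactly instead of iteratively.
# The name "_sketchPSASAnalysis" and the dense-array arguments (Hlin, B, R as
# numpy arrays) are assumptions made only for this sketch.
def _sketchPSASAnalysis(xb, y, Hlin, B, R):
    "Return xa = xb + B H^T w, with w solving (H B H^T + R) w = y - H xb"
    xb  = numpy.ravel(xb)
    d   = numpy.ravel(y) - Hlin @ xb                 # innovation
    BHT = B @ Hlin.T                                 # B H^T
    w   = numpy.linalg.solve(Hlin @ BHT + R, d)      # weights in observation (dual) space
    return (xb + BHT @ w).reshape((-1,1))
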
3008 # ==============================================================================
3009 def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"):
3010     """
3011     Stochastic EnKF
3012     """
3013     if selfA._parameters["EstimationOf"] == "Parameters":
3014         selfA._parameters["StoreInternalVariables"] = True
3015     #
3016     # Opérateurs
3017     H = HO["Direct"].appliedControledFormTo
3018     #
3019     if selfA._parameters["EstimationOf"] == "State":
3020         M = EM["Direct"].appliedControledFormTo
3021     #
3022     if CM is not None and "Tangent" in CM and U is not None:
3023         Cm = CM["Tangent"].asMatrix(Xb)
3024     else:
3025         Cm = None
3026     #
3027     # Durée d'observation et tailles
3028     if hasattr(Y,"stepnumber"):
3029         duration = Y.stepnumber()
3030         __p = numpy.cumprod(Y.shape())[-1]
3031     else:
3032         duration = 2
3033         __p = numpy.array(Y).size
3034     #
3035     # Précalcul des inversions de B et R
3036     if selfA._parameters["StoreInternalVariables"] \
3037         or selfA._toStore("CostFunctionJ") \
3038         or selfA._toStore("CostFunctionJb") \
3039         or selfA._toStore("CostFunctionJo") \
3040         or selfA._toStore("CurrentOptimum") \
3041         or selfA._toStore("APosterioriCovariance"):
3042         BI = B.getI()
3043         RI = R.getI()
3044     #
3045     __n = Xb.size
3046     __m = selfA._parameters["NumberOfMembers"]
3047     #
3048     if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
3049     else:                         Rn = R
3050     #
3051     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3052         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
3053         else:                         Pn = B
3054         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
3055         selfA.StoredVariables["Analysis"].store( Xb )
3056         if selfA._toStore("APosterioriCovariance"):
3057             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3058         selfA._setInternalState("seed", numpy.random.get_state())
3059     elif selfA._parameters["nextStep"]:
3060         Xn = selfA._getInternalState("Xn")
3061     #
3062     previousJMinimum = numpy.finfo(float).max
3063     #
3064     for step in range(duration-1):
3065         numpy.random.set_state(selfA._getInternalState("seed"))
3066         if hasattr(Y,"store"):
3067             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3068         else:
3069             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3070         #
3071         if U is not None:
3072             if hasattr(U,"store") and len(U)>1:
3073                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3074             elif hasattr(U,"store") and len(U)==1:
3075                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3076             else:
3077                 Un = numpy.asmatrix(numpy.ravel( U )).T
3078         else:
3079             Un = None
3080         #
3081         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
3082             Xn = CovarianceInflation( Xn,
3083                 selfA._parameters["InflationType"],
3084                 selfA._parameters["InflationFactor"],
3085                 )
3086         #
3087         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3088             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
3089                 argsAsSerie = True,
3090                 returnSerieAsArrayMatrix = True )
3091             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
3092             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
3093                 argsAsSerie = True,
3094                 returnSerieAsArrayMatrix = True )
3095             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
3096                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3097                 Xn_predicted = Xn_predicted + Cm * Un
3098         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3099             # --- > Par principe, M = Id, Q = 0
3100             Xn_predicted = EMX = Xn
3101             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
3102                 argsAsSerie = True,
3103                 returnSerieAsArrayMatrix = True )
3104         #
3105         # Mean of forecast and observation of forecast
3106         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
3107         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
3108         #
3109         #--------------------------
3110         if VariantM == "KalmanFilterFormula05":
3111             PfHT, HPfHT = 0., 0.
3112             for i in range(__m):
3113                 Exfi = Xn_predicted[:,i].reshape((__n,1)) - Xfm
3114                 Eyfi = HX_predicted[:,i].reshape((__p,1)) - Hfm
3115                 PfHT  += Exfi * Eyfi.T
3116                 HPfHT += Eyfi * Eyfi.T
3117             PfHT  = (1./(__m-1)) * PfHT
3118             HPfHT = (1./(__m-1)) * HPfHT
3119             Kn     = PfHT * ( R + HPfHT ).I
3120             del PfHT, HPfHT
3121             #
3122             for i in range(__m):
3123                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
3124                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
3125         #--------------------------
3126         elif VariantM == "KalmanFilterFormula16":
3127             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
3128             EpYm  = EpY.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
3129             #
3130             EaX   = EnsembleOfAnomalies( Xn_predicted ) / math.sqrt(__m-1)
3131             EaY   = (HX_predicted - Hfm - EpY + EpYm) / math.sqrt(__m-1)
3132             #
3133             Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
3134             #
3135             for i in range(__m):
3136                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
3137         #--------------------------
3138         else:
3139             raise ValueError("VariantM has to be chosen in the authorized methods list.")
3140         #
3141         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
3142             Xn = CovarianceInflation( Xn,
3143                 selfA._parameters["InflationType"],
3144                 selfA._parameters["InflationFactor"],
3145                 )
3146         #
3147         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
3148         #--------------------------
3149         selfA._setInternalState("Xn", Xn)
3150         selfA._setInternalState("seed", numpy.random.get_state())
3151         #--------------------------
3152         #
3153         if selfA._parameters["StoreInternalVariables"] \
3154             or selfA._toStore("CostFunctionJ") \
3155             or selfA._toStore("CostFunctionJb") \
3156             or selfA._toStore("CostFunctionJo") \
3157             or selfA._toStore("APosterioriCovariance") \
3158             or selfA._toStore("InnovationAtCurrentAnalysis") \
3159             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
3160             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3161             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
3162             _Innovation = Ynpu - _HXa
3163         #
3164         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3165         # ---> avec analysis
3166         selfA.StoredVariables["Analysis"].store( Xa )
3167         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3168             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
3169         if selfA._toStore("InnovationAtCurrentAnalysis"):
3170             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3171         # ---> avec current state
3172         if selfA._parameters["StoreInternalVariables"] \
3173             or selfA._toStore("CurrentState"):
3174             selfA.StoredVariables["CurrentState"].store( Xn )
3175         if selfA._toStore("ForecastState"):
3176             selfA.StoredVariables["ForecastState"].store( EMX )
3177         if selfA._toStore("ForecastCovariance"):
3178             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
3179         if selfA._toStore("BMA"):
3180             selfA.StoredVariables["BMA"].store( EMX - Xa )
3181         if selfA._toStore("InnovationAtCurrentState"):
3182             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
3183         if selfA._toStore("SimulatedObservationAtCurrentState") \
3184             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3185             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3186         # ---> autres
3187         if selfA._parameters["StoreInternalVariables"] \
3188             or selfA._toStore("CostFunctionJ") \
3189             or selfA._toStore("CostFunctionJb") \
3190             or selfA._toStore("CostFunctionJo") \
3191             or selfA._toStore("CurrentOptimum") \
3192             or selfA._toStore("APosterioriCovariance"):
3193             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3194             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3195             J   = Jb + Jo
3196             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3197             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3198             selfA.StoredVariables["CostFunctionJ" ].store( J )
3199             #
3200             if selfA._toStore("IndexOfOptimum") \
3201                 or selfA._toStore("CurrentOptimum") \
3202                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3203                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3204                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3205                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3206                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3207             if selfA._toStore("IndexOfOptimum"):
3208                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3209             if selfA._toStore("CurrentOptimum"):
3210                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3211             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3212                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3213             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3214                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3215             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3216                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3217             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3218                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3219         if selfA._toStore("APosterioriCovariance"):
3220             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
3221         if selfA._parameters["EstimationOf"] == "Parameters" \
3222             and J < previousJMinimum:
3223             previousJMinimum    = J
3224             XaMin               = Xa
3225             if selfA._toStore("APosterioriCovariance"):
3226                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3227         # ---> For the smoothers
3228         if selfA._toStore("CurrentEnsembleState"):
3229             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
3230     #
3231     # Final additional storage of the optimum in parameter estimation
3232     # ----------------------------------------------------------------------
3233     if selfA._parameters["EstimationOf"] == "Parameters":
3234         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3235         selfA.StoredVariables["Analysis"].store( XaMin )
3236         if selfA._toStore("APosterioriCovariance"):
3237             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3238         if selfA._toStore("BMA"):
3239             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3240     #
3241     return 0
3242
3243 # ==============================================================================
3244 def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3245     """
3246     3DVAR
3247     """
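         # For reference, the routine below minimizes the classical 3DVAR functional
         #     J(x) = 1/2 (x - Xb)' B^{-1} (x - Xb) + 1/2 (Y - H(x))' R^{-1} (Y - H(x))
         # whose gradient, returned by GradientOfCostFunction, is
         #     grad J(x) = B^{-1} (x - Xb) - H'(x)^T R^{-1} (Y - H(x))
         # with the inverses BI and RI precomputed once below.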
3248     #
3249     # Initialisations
3250     # ---------------
3251     Hm = HO["Direct"].appliedTo
3252     Ha = HO["Adjoint"].appliedInXTo
3253     #
3254     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
3255         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
3256     else:
3257         HXb = Hm( Xb )
3258     HXb = HXb.reshape((-1,1))
3259     if Y.size != HXb.size:
3260         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
3261     if max(Y.shape) != max(HXb.shape):
3262         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
3263     #
3264     if selfA._toStore("JacobianMatrixAtBackground"):
3265         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
3266         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
3267         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
3268     #
3269     BI = B.getI()
3270     RI = R.getI()
3271     #
3272     Xini = selfA._parameters["InitializationPoint"]
3273     #
3274     # Definition of the cost function
3275     # ------------------------------
3276     def CostFunction(x):
3277         _X  = numpy.ravel( x ).reshape((-1,1))
3278         if selfA._parameters["StoreInternalVariables"] or \
3279             selfA._toStore("CurrentState") or \
3280             selfA._toStore("CurrentOptimum"):
3281             selfA.StoredVariables["CurrentState"].store( _X )
3282         _HX = Hm( _X ).reshape((-1,1))
3283         _Innovation = Y - _HX
3284         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3285             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3286             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3287         if selfA._toStore("InnovationAtCurrentState"):
3288             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3289         #
3290         Jb  = float( 0.5 * (_X - Xb).T * (BI * (_X - Xb)) )
3291         Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
3292         J   = Jb + Jo
3293         #
3294         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3295         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3296         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3297         selfA.StoredVariables["CostFunctionJ" ].store( J )
3298         if selfA._toStore("IndexOfOptimum") or \
3299             selfA._toStore("CurrentOptimum") or \
3300             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3301             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3302             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3303             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3304             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3305         if selfA._toStore("IndexOfOptimum"):
3306             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3307         if selfA._toStore("CurrentOptimum"):
3308             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3309         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3310             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3311         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3312             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3313         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3314             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3315         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3316             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3317         return J
3318     #
3319     def GradientOfCostFunction(x):
3320         _X      = x.reshape((-1,1))
3321         _HX     = Hm( _X ).reshape((-1,1))
3322         GradJb  = BI * (_X - Xb)
3323         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3324         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3325         return GradJ
3326     #
3327     # Minimization of the functional
3328     # --------------------------------
3329     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3330     #
3331     if selfA._parameters["Minimizer"] == "LBFGSB":
3332         if "0.19" <= scipy.version.version <= "1.1.0":
3333             import lbfgsbhlt as optimiseur
3334         else:
3335             import scipy.optimize as optimiseur
3336         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3337             func        = CostFunction,
3338             x0          = Xini,
3339             fprime      = GradientOfCostFunction,
3340             args        = (),
3341             bounds      = selfA._parameters["Bounds"],
3342             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3343             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3344             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3345             iprint      = selfA._parameters["optiprint"],
3346             )
3347         nfeval = Informations['funcalls']
3348         rc     = Informations['warnflag']
3349     elif selfA._parameters["Minimizer"] == "TNC":
3350         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3351             func        = CostFunction,
3352             x0          = Xini,
3353             fprime      = GradientOfCostFunction,
3354             args        = (),
3355             bounds      = selfA._parameters["Bounds"],
3356             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3357             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3358             ftol        = selfA._parameters["CostDecrementTolerance"],
3359             messages    = selfA._parameters["optmessages"],
3360             )
3361     elif selfA._parameters["Minimizer"] == "CG":
3362         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3363             f           = CostFunction,
3364             x0          = Xini,
3365             fprime      = GradientOfCostFunction,
3366             args        = (),
3367             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3368             gtol        = selfA._parameters["GradientNormTolerance"],
3369             disp        = selfA._parameters["optdisp"],
3370             full_output = True,
3371             )
3372     elif selfA._parameters["Minimizer"] == "NCG":
3373         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3374             f           = CostFunction,
3375             x0          = Xini,
3376             fprime      = GradientOfCostFunction,
3377             args        = (),
3378             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3379             avextol     = selfA._parameters["CostDecrementTolerance"],
3380             disp        = selfA._parameters["optdisp"],
3381             full_output = True,
3382             )
3383     elif selfA._parameters["Minimizer"] == "BFGS":
3384         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3385             f           = CostFunction,
3386             x0          = Xini,
3387             fprime      = GradientOfCostFunction,
3388             args        = (),
3389             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3390             gtol        = selfA._parameters["GradientNormTolerance"],
3391             disp        = selfA._parameters["optdisp"],
3392             full_output = True,
3393             )
3394     else:
3395         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3396     #
3397     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3398     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3399     #
3400     # Correction to work around a TNC bug on the returned Minimum
3401     # ----------------------------------------------------------------
3402     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3403         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3404     #
3405     Xa = Minimum
3406     #--------------------------
3407     #
3408     selfA.StoredVariables["Analysis"].store( Xa )
3409     #
3410     if selfA._toStore("OMA") or \
3411         selfA._toStore("SigmaObs2") or \
3412         selfA._toStore("SimulationQuantiles") or \
3413         selfA._toStore("SimulatedObservationAtOptimum"):
3414         if selfA._toStore("SimulatedObservationAtCurrentState"):
3415             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3416         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3417             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3418         else:
3419             HXa = Hm( Xa )
3420     #
3421     if selfA._toStore("APosterioriCovariance") or \
3422         selfA._toStore("SimulationQuantiles") or \
3423         selfA._toStore("JacobianMatrixAtOptimum") or \
3424         selfA._toStore("KalmanGainAtOptimum"):
3425         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3426         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3427     if selfA._toStore("APosterioriCovariance") or \
3428         selfA._toStore("SimulationQuantiles") or \
3429         selfA._toStore("KalmanGainAtOptimum"):
3430         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3431         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
3432     if selfA._toStore("APosterioriCovariance") or \
3433         selfA._toStore("SimulationQuantiles"):
3434         A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
3435     if selfA._toStore("APosterioriCovariance"):
3436         selfA.StoredVariables["APosterioriCovariance"].store( A )
3437     if selfA._toStore("JacobianMatrixAtOptimum"):
3438         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3439     if selfA._toStore("KalmanGainAtOptimum"):
3440         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3441         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3442         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3443     #
3444     # Additional calculations and/or storage
3445     # ---------------------------------------
3446     if selfA._toStore("Innovation") or \
3447         selfA._toStore("SigmaObs2") or \
3448         selfA._toStore("MahalanobisConsistency") or \
3449         selfA._toStore("OMB"):
3450         d  = Y - HXb
3451     if selfA._toStore("Innovation"):
3452         selfA.StoredVariables["Innovation"].store( d )
3453     if selfA._toStore("BMA"):
3454         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3455     if selfA._toStore("OMA"):
3456         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3457     if selfA._toStore("OMB"):
3458         selfA.StoredVariables["OMB"].store( d )
3459     if selfA._toStore("SigmaObs2"):
3460         TraceR = R.trace(Y.size)
3461         selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
3462     if selfA._toStore("MahalanobisConsistency"):
3463         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3464     if selfA._toStore("SimulationQuantiles"):
3465         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3466     if selfA._toStore("SimulatedObservationAtBackground"):
3467         selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
3468     if selfA._toStore("SimulatedObservationAtOptimum"):
3469         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
3470     #
3471     return 0
3472
3473 # ==============================================================================
3474 def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3475     """
3476     4DVAR
3477     """
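         # For reference, the functional minimized below is the strong-constraint 4DVAR cost
         #     J(x0) = 1/2 (x0 - Xb)' B^{-1} (x0 - Xb) + 1/2 sum_k (Y_k - H(x_k))' R^{-1} (Y_k - H(x_k))
         # where x_{k+1} = M(x_k) [+ C u_k]; its gradient is obtained by a backward
         # integration of the adjoint model in GradientOfCostFunction.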
3478     #
3479     # Initialisations
3480     # ---------------
3481     #
3482     # Operators
3483     Hm = HO["Direct"].appliedControledFormTo
3484     Mm = EM["Direct"].appliedControledFormTo
3485     #
3486     if CM is not None and "Tangent" in CM and U is not None:
3487         Cm = CM["Tangent"].asMatrix(Xb)
3488     else:
3489         Cm = None
3490     #
3491     def Un(_step):
3492         if U is not None:
3493             if hasattr(U,"store") and 1<=_step<len(U) :
3494                 _Un = numpy.asmatrix(numpy.ravel( U[_step] )).T
3495             elif hasattr(U,"store") and len(U)==1:
3496                 _Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3497             else:
3498                 _Un = numpy.asmatrix(numpy.ravel( U )).T
3499         else:
3500             _Un = None
3501         return _Un
3502     def CmUn(_xn,_un):
3503         if Cm is not None and _un is not None: # Warning: if Cm is also included in M, it is counted twice!
3504             _Cm   = Cm.reshape(_xn.size,_un.size) # ADAO & check shape
3505             _CmUn = _Cm * _un
3506         else:
3507             _CmUn = 0.
3508         return _CmUn
3509     #
3510     # Remark: the observations are used starting from time step number 1,
3511     # and are stored in Yo as rows according to those indices. Thus step 0
3512     # is not used, since the first stage starts with the observation of
3513     # step 1.
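         # For example, with duration = 3 observation steps, the time loop in
         # CostFunction below only uses Y[1] and Y[2].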
3514     #
3515     # Number of steps identical to the number of observation steps
3516     if hasattr(Y,"stepnumber"):
3517         duration = Y.stepnumber()
3518     else:
3519         duration = 2
3520     #
3521     # Precompute the inverses of B and R
3522     BI = B.getI()
3523     RI = R.getI()
3524     #
3525     # Starting point of the optimization
3526     Xini = selfA._parameters["InitializationPoint"]
3527     #
3528     # Definition of the cost function
3529     # ------------------------------
3530     selfA.DirectCalculation = [None,] # Step 0 is not observed
3531     selfA.DirectInnovation  = [None,] # Step 0 is not observed
3532     def CostFunction(x):
3533         _X  = numpy.asmatrix(numpy.ravel( x )).T
3534         if selfA._parameters["StoreInternalVariables"] or \
3535             selfA._toStore("CurrentState") or \
3536             selfA._toStore("CurrentOptimum"):
3537             selfA.StoredVariables["CurrentState"].store( _X )
3538         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3539         selfA.DirectCalculation = [None,]
3540         selfA.DirectInnovation  = [None,]
3541         Jo  = 0.
3542         _Xn = _X
3543         for step in range(0,duration-1):
3544             if hasattr(Y,"store"):
3545                 _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
3546             else:
3547                 _Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
3548             _Un = Un(step)
3549             #
3550             # Evolution step
3551             if selfA._parameters["EstimationOf"] == "State":
3552                 _Xn = Mm( (_Xn, _Un) ) + CmUn(_Xn, _Un)
3553             elif selfA._parameters["EstimationOf"] == "Parameters":
3554                 pass
3555             #
3556             if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3557                 _Xn = ApplyBounds( _Xn, ForceNumericBounds(selfA._parameters["Bounds"]) )
3558             #
3559             # Difference-to-observations step
3560             if selfA._parameters["EstimationOf"] == "State":
3561                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
3562             elif selfA._parameters["EstimationOf"] == "Parameters":
3563                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
3564             #
3565             # Storage of the state
3566             selfA.DirectCalculation.append( _Xn )
3567             selfA.DirectInnovation.append( _YmHMX )
3568             #
3569             # Accumulation into the observation functional
3570             Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
3571         J = Jb + Jo
3572         #
3573         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3574         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3575         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3576         selfA.StoredVariables["CostFunctionJ" ].store( J )
3577         if selfA._toStore("IndexOfOptimum") or \
3578             selfA._toStore("CurrentOptimum") or \
3579             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3580             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3581             selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3582             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3583         if selfA._toStore("IndexOfOptimum"):
3584             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3585         if selfA._toStore("CurrentOptimum"):
3586             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3587         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3588             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3589         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3590             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3591         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3592             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3593         return J
3594     #
3595     def GradientOfCostFunction(x):
3596         _X      = numpy.asmatrix(numpy.ravel( x )).T
3597         GradJb  = BI * (_X - Xb)
3598         GradJo  = 0.
3599         for step in range(duration-1,0,-1):
3600             # Retrieve the last stored evolution state
3601             _Xn = selfA.DirectCalculation.pop()
3602             # Retrieve the last stored innovation
3603             _YmHMX = selfA.DirectInnovation.pop()
3604             # Computation of the adjoints
3605             Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3606             Ha = Ha.reshape(_Xn.size,_YmHMX.size) # ADAO & check shape
3607             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3608             Ma = Ma.reshape(_Xn.size,_Xn.size) # ADAO & check shape
3609             # Gradient computation by the adjoint state
3610             GradJo = GradJo + Ha * (RI * _YmHMX) # For a linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
3611             GradJo = Ma * GradJo                 # For a linear Ma, equivalent to: Ma( (_Xn, GradJo) )
3612         GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
3613         return GradJ
3614     #
3615     # Minimization of the functional
3616     # --------------------------------
3617     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3618     #
3619     if selfA._parameters["Minimizer"] == "LBFGSB":
3620         if "0.19" <= scipy.version.version <= "1.1.0":
3621             import lbfgsbhlt as optimiseur
3622         else:
3623             import scipy.optimize as optimiseur
3624         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3625             func        = CostFunction,
3626             x0          = Xini,
3627             fprime      = GradientOfCostFunction,
3628             args        = (),
3629             bounds      = selfA._parameters["Bounds"],
3630             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3631             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3632             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3633             iprint      = selfA._parameters["optiprint"],
3634             )
3635         nfeval = Informations['funcalls']
3636         rc     = Informations['warnflag']
3637     elif selfA._parameters["Minimizer"] == "TNC":
3638         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3639             func        = CostFunction,
3640             x0          = Xini,
3641             fprime      = GradientOfCostFunction,
3642             args        = (),
3643             bounds      = selfA._parameters["Bounds"],
3644             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3645             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3646             ftol        = selfA._parameters["CostDecrementTolerance"],
3647             messages    = selfA._parameters["optmessages"],
3648             )
3649     elif selfA._parameters["Minimizer"] == "CG":
3650         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3651             f           = CostFunction,
3652             x0          = Xini,
3653             fprime      = GradientOfCostFunction,
3654             args        = (),
3655             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3656             gtol        = selfA._parameters["GradientNormTolerance"],
3657             disp        = selfA._parameters["optdisp"],
3658             full_output = True,
3659             )
3660     elif selfA._parameters["Minimizer"] == "NCG":
3661         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3662             f           = CostFunction,
3663             x0          = Xini,
3664             fprime      = GradientOfCostFunction,
3665             args        = (),
3666             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3667             avextol     = selfA._parameters["CostDecrementTolerance"],
3668             disp        = selfA._parameters["optdisp"],
3669             full_output = True,
3670             )
3671     elif selfA._parameters["Minimizer"] == "BFGS":
3672         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3673             f           = CostFunction,
3674             x0          = Xini,
3675             fprime      = GradientOfCostFunction,
3676             args        = (),
3677             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3678             gtol        = selfA._parameters["GradientNormTolerance"],
3679             disp        = selfA._parameters["optdisp"],
3680             full_output = True,
3681             )
3682     else:
3683         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3684     #
3685     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3686     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3687     #
3688     # Correction to work around a TNC bug on the returned Minimum
3689     # ----------------------------------------------------------------
3690     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3691         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3692     #
3693     # Retrieval of the analysis
3694     # ----------------------
3695     Xa = Minimum
3696     #
3697     selfA.StoredVariables["Analysis"].store( Xa )
3698     #
3699     # Additional calculations and/or storage
3700     # ---------------------------------------
3701     if selfA._toStore("BMA"):
3702         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3703     #
3704     return 0
3705
3706 # ==============================================================================
3707 def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3708     """
3709     Standard Kalman Filter
3710     """
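         # For reference, the time loop below implements the textbook linear Kalman recursion
         #     forecast : Xf = M Xa [+ C U],             Pf = M Pa M' + Q
         #     analysis : K = Pf H' (H Pf H' + R)^{-1},  Xa = Xf + K (Y - H Xf),  Pa = (I - K H) Pf
         # using the tangent (Mt, Ht) and adjoint (Ma, Ha) matrices of the operators.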
3711     if selfA._parameters["EstimationOf"] == "Parameters":
3712         selfA._parameters["StoreInternalVariables"] = True
3713     #
3714     # Operators
3715     # ----------
3716     Ht = HO["Tangent"].asMatrix(Xb)
3717     Ha = HO["Adjoint"].asMatrix(Xb)
3718     #
3719     if selfA._parameters["EstimationOf"] == "State":
3720         Mt = EM["Tangent"].asMatrix(Xb)
3721         Ma = EM["Adjoint"].asMatrix(Xb)
3722     #
3723     if CM is not None and "Tangent" in CM and U is not None:
3724         Cm = CM["Tangent"].asMatrix(Xb)
3725     else:
3726         Cm = None
3727     #
3728     # Observation duration and sizes
3729     if hasattr(Y,"stepnumber"):
3730         duration = Y.stepnumber()
3731         __p = numpy.cumprod(Y.shape())[-1]
3732     else:
3733         duration = 2
3734         __p = numpy.array(Y).size
3735     #
3736     # Precompute the inverses of B and R
3737     if selfA._parameters["StoreInternalVariables"] \
3738         or selfA._toStore("CostFunctionJ") \
3739         or selfA._toStore("CostFunctionJb") \
3740         or selfA._toStore("CostFunctionJo") \
3741         or selfA._toStore("CurrentOptimum") \
3742         or selfA._toStore("APosterioriCovariance"):
3743         BI = B.getI()
3744         RI = R.getI()
3745     #
3746     __n = Xb.size
3747     #
3748     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3749         Xn = Xb
3750         Pn = B
3751         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3752         selfA.StoredVariables["Analysis"].store( Xb )
3753         if selfA._toStore("APosterioriCovariance"):
3754             if hasattr(B,"asfullmatrix"):
3755                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
3756             else:
3757                 selfA.StoredVariables["APosterioriCovariance"].store( B )
3758         selfA._setInternalState("seed", numpy.random.get_state())
3759     elif selfA._parameters["nextStep"]:
3760         Xn = selfA._getInternalState("Xn")
3761         Pn = selfA._getInternalState("Pn")
3762     #
3763     if selfA._parameters["EstimationOf"] == "Parameters":
3764         XaMin            = Xn
3765         previousJMinimum = numpy.finfo(float).max
3766     #
3767     for step in range(duration-1):
3768         if hasattr(Y,"store"):
3769             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3770         else:
3771             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3772         #
3773         if U is not None:
3774             if hasattr(U,"store") and len(U)>1:
3775                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3776             elif hasattr(U,"store") and len(U)==1:
3777                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3778             else:
3779                 Un = numpy.asmatrix(numpy.ravel( U )).T
3780         else:
3781             Un = None
3782         #
3783         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
3784             Xn_predicted = Mt * Xn
3785             if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3786                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
3787                 Xn_predicted = Xn_predicted + Cm * Un
3788             Pn_predicted = Q + Mt * (Pn * Ma)
3789         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
3790             # --- > By principle, M = Id, Q = 0
3791             Xn_predicted = Xn
3792             Pn_predicted = Pn
3793         #
3794         if selfA._parameters["EstimationOf"] == "State":
3795             HX_predicted = Ht * Xn_predicted
3796             _Innovation  = Ynpu - HX_predicted
3797         elif selfA._parameters["EstimationOf"] == "Parameters":
3798             HX_predicted = Ht * Xn_predicted
3799             _Innovation  = Ynpu - HX_predicted
3800             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
3801                 _Innovation = _Innovation - Cm * Un
3802         #
3803         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
3804         Xn = Xn_predicted + Kn * _Innovation
3805         Pn = Pn_predicted - Kn * Ht * Pn_predicted
3806         #
3807         Xa = Xn # Pointer assignment (no copy)
3808         #--------------------------
3809         selfA._setInternalState("Xn", Xn)
3810         selfA._setInternalState("Pn", Pn)
3811         #--------------------------
3812         #
3813         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3814         # ---> with analysis
3815         selfA.StoredVariables["Analysis"].store( Xa )
3816         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
3817             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
3818         if selfA._toStore("InnovationAtCurrentAnalysis"):
3819             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
3820         # ---> with current state
3821         if selfA._parameters["StoreInternalVariables"] \
3822             or selfA._toStore("CurrentState"):
3823             selfA.StoredVariables["CurrentState"].store( Xn )
3824         if selfA._toStore("ForecastState"):
3825             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
3826         if selfA._toStore("ForecastCovariance"):
3827             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
3828         if selfA._toStore("BMA"):
3829             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
3830         if selfA._toStore("InnovationAtCurrentState"):
3831             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3832         if selfA._toStore("SimulatedObservationAtCurrentState") \
3833             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3834             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
3835         # ---> others
3836         if selfA._parameters["StoreInternalVariables"] \
3837             or selfA._toStore("CostFunctionJ") \
3838             or selfA._toStore("CostFunctionJb") \
3839             or selfA._toStore("CostFunctionJo") \
3840             or selfA._toStore("CurrentOptimum") \
3841             or selfA._toStore("APosterioriCovariance"):
3842             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3843             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3844             J   = Jb + Jo
3845             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3846             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3847             selfA.StoredVariables["CostFunctionJ" ].store( J )
3848             #
3849             if selfA._toStore("IndexOfOptimum") \
3850                 or selfA._toStore("CurrentOptimum") \
3851                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
3852                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
3853                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
3854                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3855                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3856             if selfA._toStore("IndexOfOptimum"):
3857                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3858             if selfA._toStore("CurrentOptimum"):
3859                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
3860             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3861                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
3862             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3863                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3864             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3865                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3866             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3867                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3868         if selfA._toStore("APosterioriCovariance"):
3869             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3870         if selfA._parameters["EstimationOf"] == "Parameters" \
3871             and J < previousJMinimum:
3872             previousJMinimum    = J
3873             XaMin               = Xa
3874             if selfA._toStore("APosterioriCovariance"):
3875                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3876     #
3877     # Final additional storage of the optimum in parameter estimation
3878     # ----------------------------------------------------------------------
3879     if selfA._parameters["EstimationOf"] == "Parameters":
3880         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3881         selfA.StoredVariables["Analysis"].store( XaMin )
3882         if selfA._toStore("APosterioriCovariance"):
3883             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3884         if selfA._toStore("BMA"):
3885             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3886     #
3887     return 0
3888
3889 # ==============================================================================
3890 def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3891     """
3892     Unscented Kalman Filter
3893     """
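         # For reference, the filter below propagates 2*L+1 sigma points X, X +/- Gamma*sqrt(P)
         # through the evolution and observation operators, the mean weights Wm and the
         # covariance weights Wc being derived from the scaling parameters (Alpha, Beta, Kappa).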
3894     if selfA._parameters["EstimationOf"] == "Parameters":
3895         selfA._parameters["StoreInternalVariables"] = True
3896     #
3897     L     = Xb.size
3898     Alpha = selfA._parameters["Alpha"]
3899     Beta  = selfA._parameters["Beta"]
3900     if selfA._parameters["Kappa"] == 0:
3901         if selfA._parameters["EstimationOf"] == "State":
3902             Kappa = 0
3903         elif selfA._parameters["EstimationOf"] == "Parameters":
3904             Kappa = 3 - L
3905     else:
3906         Kappa = selfA._parameters["Kappa"]
3907     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3908     Gamma  = math.sqrt( L + Lambda )
3909     #
3910     Ww = []
3911     Ww.append( 0. )
3912     for i in range(2*L):
3913         Ww.append( 1. / (2.*(L + Lambda)) )
3914     #
3915     Wm = numpy.array( Ww )
3916     Wm[0] = Lambda / (L + Lambda)
3917     Wc = numpy.array( Ww )
3918     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
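     # By construction the mean weights Wm sum to 1, Wc[0] additionally carrying
     # the (1. - Alpha**2 + Beta) correction used for the covariance estimates.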
3919     #
3920     # Operators
3921     Hm = HO["Direct"].appliedControledFormTo
3922     #
3923     if selfA._parameters["EstimationOf"] == "State":
3924         Mm = EM["Direct"].appliedControledFormTo
3925     #
3926     if CM is not None and "Tangent" in CM and U is not None:
3927         Cm = CM["Tangent"].asMatrix(Xb)
3928     else:
3929         Cm = None
3930     #
3931     # Observation duration and sizes
3932     if hasattr(Y,"stepnumber"):
3933         duration = Y.stepnumber()
3934         __p = numpy.cumprod(Y.shape())[-1]
3935     else:
3936         duration = 2
3937         __p = numpy.array(Y).size
3938     #
3939     # Precompute the inverses of B and R
3940     if selfA._parameters["StoreInternalVariables"] \
3941         or selfA._toStore("CostFunctionJ") \
3942         or selfA._toStore("CostFunctionJb") \
3943         or selfA._toStore("CostFunctionJo") \
3944         or selfA._toStore("CurrentOptimum") \
3945         or selfA._toStore("APosterioriCovariance"):
3946         BI = B.getI()
3947         RI = R.getI()
3948     #
3949     __n = Xb.size
3950     #
3951     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3952         Xn = Xb
3953         if hasattr(B,"asfullmatrix"):
3954             Pn = B.asfullmatrix(__n)
3955         else:
3956             Pn = B
3957         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3958         selfA.StoredVariables["Analysis"].store( Xb )
3959         if selfA._toStore("APosterioriCovariance"):
3960             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3961     elif selfA._parameters["nextStep"]:
3962         Xn = selfA._getInternalState("Xn")
3963         Pn = selfA._getInternalState("Pn")
3964     #
3965     if selfA._parameters["EstimationOf"] == "Parameters":
3966         XaMin            = Xn
3967         previousJMinimum = numpy.finfo(float).max
3968     #
3969     for step in range(duration-1):
3970         if hasattr(Y,"store"):
3971             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3972         else:
3973             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3974         #
3975         if U is not None:
3976             if hasattr(U,"store") and len(U)>1:
3977                 Un = numpy.ravel( U[step] ).reshape((-1,1))
3978             elif hasattr(U,"store") and len(U)==1:
3979                 Un = numpy.ravel( U[0] ).reshape((-1,1))
3980             else:
3981                 Un = numpy.ravel( U ).reshape((-1,1))
3982         else:
3983             Un = None
3984         #
3985         Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
3986         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
3987         nbSpts = 2*Xn.size+1
3988         #
3989         XEtnnp = []
3990         for point in range(nbSpts):
3991             if selfA._parameters["EstimationOf"] == "State":
3992                 XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1))
3993                 if Cm is not None and Un is not None: # Warning: if Cm is also included in M, it is counted twice!
3994                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
3995                     XEtnnpi = XEtnnpi + Cm * Un
3996             elif selfA._parameters["EstimationOf"] == "Parameters":
3997                 # --- > By principle, M = Id, Q = 0
3998                 XEtnnpi = Xnp[:,point]
3999             XEtnnp.append( numpy.ravel(XEtnnpi).reshape((-1,1)) )
4000         XEtnnp = numpy.concatenate( XEtnnp, axis=1 )
4001         #
4002         Xncm = ( XEtnnp * Wm ).sum(axis=1)
4003         #
4004         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
4005         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
4006         for point in range(nbSpts):
4007             Pnm += Wc[point] * ((XEtnnp[:,point]-Xncm).reshape((-1,1)) * (XEtnnp[:,point]-Xncm))
4008         #
4009         Pnmdemi = numpy.real(scipy.linalg.sqrtm(Pnm))
4010         #
4011         Xnnp = numpy.hstack([Xncm.reshape((-1,1)), Xncm.reshape((-1,1))+Gamma*Pnmdemi, Xncm.reshape((-1,1))-Gamma*Pnmdemi])
4012         #
4013         Ynnp = []
4014         for point in range(nbSpts):
4015             if selfA._parameters["EstimationOf"] == "State":
4016                 Ynnpi = Hm( (Xnnp[:,point], None) )
4017             elif selfA._parameters["EstimationOf"] == "Parameters":
4018                 Ynnpi = Hm( (Xnnp[:,point], Un) )
4019             Ynnp.append( numpy.ravel(Ynnpi).reshape((-1,1)) )
4020         Ynnp = numpy.concatenate( Ynnp, axis=1 )
4021         #
4022         Yncm = ( Ynnp * Wm ).sum(axis=1)
4023         #
4024         Pyyn = R
4025         Pxyn = 0.
4026         for point in range(nbSpts):
4027             Pyyn += Wc[point] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
4028             Pxyn += Wc[point] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
4029         #
4030         _Innovation  = Ynpu - Yncm.reshape((-1,1))
4031         if selfA._parameters["EstimationOf"] == "Parameters":
4032             if Cm is not None and Un is not None: # Warning: if Cm is also included in H, it is counted twice!
4033                 _Innovation = _Innovation - Cm * Un
4034         #
4035         Kn = Pxyn * Pyyn.I
4036         Xn = Xncm.reshape((-1,1)) + Kn * _Innovation
4037         Pn = Pnm - Kn * Pyyn * Kn.T
4038         #
4039         Xa = Xn # Pointer assignment (no copy)
4040         #--------------------------
4041         selfA._setInternalState("Xn", Xn)
4042         selfA._setInternalState("Pn", Pn)
4043         #--------------------------
4044         #
4045         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4046         # ---> with analysis
4047         selfA.StoredVariables["Analysis"].store( Xa )
4048         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
4049             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
4050         if selfA._toStore("InnovationAtCurrentAnalysis"):
4051             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
4052         # ---> with current state
4053         if selfA._parameters["StoreInternalVariables"] \
4054             or selfA._toStore("CurrentState"):
4055             selfA.StoredVariables["CurrentState"].store( Xn )
4056         if selfA._toStore("ForecastState"):
4057             selfA.StoredVariables["ForecastState"].store( Xncm )
4058         if selfA._toStore("ForecastCovariance"):
4059             selfA.StoredVariables["ForecastCovariance"].store( Pnm )
4060         if selfA._toStore("BMA"):
4061             selfA.StoredVariables["BMA"].store( Xncm - Xa )
4062         if selfA._toStore("InnovationAtCurrentState"):
4063             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
4064         if selfA._toStore("SimulatedObservationAtCurrentState") \
4065             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4066             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
4067         # ---> others
4068         if selfA._parameters["StoreInternalVariables"] \
4069             or selfA._toStore("CostFunctionJ") \
4070             or selfA._toStore("CostFunctionJb") \
4071             or selfA._toStore("CostFunctionJo") \
4072             or selfA._toStore("CurrentOptimum") \
4073             or selfA._toStore("APosterioriCovariance"):
4074             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
4075             Jo  = float( 0.5 * _Innovation.T * (RI * _Innovation) )
4076             J   = Jb + Jo
4077             selfA.StoredVariables["CostFunctionJb"].store( Jb )
4078             selfA.StoredVariables["CostFunctionJo"].store( Jo )
4079             selfA.StoredVariables["CostFunctionJ" ].store( J )
4080             #
4081             if selfA._toStore("IndexOfOptimum") \
4082                 or selfA._toStore("CurrentOptimum") \
4083                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
4084                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
4085                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
4086                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4087                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4088             if selfA._toStore("IndexOfOptimum"):
4089                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
4090             if selfA._toStore("CurrentOptimum"):
4091                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
4092             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4093                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
4094             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
4095                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
4096             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
4097                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
4098             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
4099                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
4100         if selfA._toStore("APosterioriCovariance"):
4101             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
4102         if selfA._parameters["EstimationOf"] == "Parameters" \
4103             and J < previousJMinimum:
4104             previousJMinimum    = J
4105             XaMin               = Xa
4106             if selfA._toStore("APosterioriCovariance"):
4107                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
4108     #
4109     # Final additional storage of the optimum in parameter estimation
4110     # ----------------------------------------------------------------------
4111     if selfA._parameters["EstimationOf"] == "Parameters":
4112         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
4113         selfA.StoredVariables["Analysis"].store( XaMin )
4114         if selfA._toStore("APosterioriCovariance"):
4115             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
4116         if selfA._toStore("BMA"):
4117             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
4118     #
4119     return 0
4120
4121 # ==============================================================================
4122 def van3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
4123     """
4124     3DVAR variational analysis with no inversion of B
4125     """
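         # For reference, the minimization below acts on the increment v through the change
         # of variable x = Xb + B v, so that Jb = 1/2 v' B v and no inverse of B is needed
         # during the iterations; BI = B^{-1} is only computed at the end, and only if the
         # a posteriori covariance or the simulation quantiles are requested.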
4126     #
4127     # Initialisations
4128     # ---------------
4129     Hm = HO["Direct"].appliedTo
4130     Ha = HO["Adjoint"].appliedInXTo
4131     #
4132     BT = B.getT()
4133     RI = R.getI()
         HXb = Hm( Xb ).reshape((-1,1)) # H(Xb), required by the optional Innovation/OMB/SigmaObs2 storages below
4134     #
4135     Xini = numpy.zeros(Xb.shape)
4136     #
4137     # Definition of the cost function
4138     # ------------------------------
4139     def CostFunction(v):
4140         _V = numpy.asmatrix(numpy.ravel( v )).T
4141         _X = Xb + B * _V
4142         if selfA._parameters["StoreInternalVariables"] or \
4143             selfA._toStore("CurrentState") or \
4144             selfA._toStore("CurrentOptimum"):
4145             selfA.StoredVariables["CurrentState"].store( _X )
4146         _HX = Hm( _X )
4147         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
4148         _Innovation = Y - _HX
4149         if selfA._toStore("SimulatedObservationAtCurrentState") or \
4150             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4151             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
4152         if selfA._toStore("InnovationAtCurrentState"):
4153             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
4154         #
4155         Jb  = float( 0.5 * _V.T * BT * _V )
4156         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
4157         J   = Jb + Jo
4158         #
4159         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
4160         selfA.StoredVariables["CostFunctionJb"].store( Jb )
4161         selfA.StoredVariables["CostFunctionJo"].store( Jo )
4162         selfA.StoredVariables["CostFunctionJ" ].store( J )
4163         if selfA._toStore("IndexOfOptimum") or \
4164             selfA._toStore("CurrentOptimum") or \
4165             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
4166             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
4167             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
4168             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4169             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4170         if selfA._toStore("IndexOfOptimum"):
4171             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
4172         if selfA._toStore("CurrentOptimum"):
4173             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
4174         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
4175             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
4176         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
4177             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
4178         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
4179             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
4180         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
4181             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
4182         return J
4183     #
4184     def GradientOfCostFunction(v):
4185         _V = v.reshape((-1,1))
4186         _X = Xb + (B @ _V).reshape((-1,1))
4187         _HX     = Hm( _X ).reshape((-1,1))
4188         GradJb  = BT * _V
4189         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
4190         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
4191         return GradJ
4192     #
4193     # Minimization of the functional
4194     # --------------------------------
4195     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
4196     #
4197     if selfA._parameters["Minimizer"] == "LBFGSB":
4198         if "0.19" <= scipy.version.version <= "1.1.0":
4199             import lbfgsbhlt as optimiseur
4200         else:
4201             import scipy.optimize as optimiseur
4202         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
4203             func        = CostFunction,
4204             x0          = Xini,
4205             fprime      = GradientOfCostFunction,
4206             args        = (),
4207             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4208             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
4209             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
4210             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4211             iprint      = selfA._parameters["optiprint"],
4212             )
4213         nfeval = Informations['funcalls']
4214         rc     = Informations['warnflag']
4215     elif selfA._parameters["Minimizer"] == "TNC":
4216         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
4217             func        = CostFunction,
4218             x0          = Xini,
4219             fprime      = GradientOfCostFunction,
4220             args        = (),
4221             bounds      = RecentredBounds(selfA._parameters["Bounds"], Xb),
4222             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
4223             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
4224             ftol        = selfA._parameters["CostDecrementTolerance"],
4225             messages    = selfA._parameters["optmessages"],
4226             )
4227     elif selfA._parameters["Minimizer"] == "CG":
4228         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
4229             f           = CostFunction,
4230             x0          = Xini,
4231             fprime      = GradientOfCostFunction,
4232             args        = (),
4233             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4234             gtol        = selfA._parameters["GradientNormTolerance"],
4235             disp        = selfA._parameters["optdisp"],
4236             full_output = True,
4237             )
4238     elif selfA._parameters["Minimizer"] == "NCG":
4239         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
4240             f           = CostFunction,
4241             x0          = Xini,
4242             fprime      = GradientOfCostFunction,
4243             args        = (),
4244             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4245             avextol     = selfA._parameters["CostDecrementTolerance"],
4246             disp        = selfA._parameters["optdisp"],
4247             full_output = True,
4248             )
4249     elif selfA._parameters["Minimizer"] == "BFGS":
4250         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
4251             f           = CostFunction,
4252             x0          = Xini,
4253             fprime      = GradientOfCostFunction,
4254             args        = (),
4255             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
4256             gtol        = selfA._parameters["GradientNormTolerance"],
4257             disp        = selfA._parameters["optdisp"],
4258             full_output = True,
4259             )
4260     else:
4261         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
4262     #
4263     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
4264     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
4265     #
4266     # Correction to work around a TNC bug on the returned Minimum
4267     # ----------------------------------------------------------------
4268     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
4269         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
4270     else:
4271         Minimum = Xb + B * Minimum.reshape((-1,1)) # Not the @ operator
    #
    Xa = Minimum
    #--------------------------
    #
    selfA.StoredVariables["Analysis"].store( Xa )
    #
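    # H(xa): reuse a simulation already stored during the iterations when available, otherwise evaluate the observation operator once more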
    if selfA._toStore("OMA") or \
        selfA._toStore("SigmaObs2") or \
        selfA._toStore("SimulationQuantiles") or \
        selfA._toStore("SimulatedObservationAtOptimum"):
        if selfA._toStore("SimulatedObservationAtCurrentState"):
            HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
        elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
            HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
        else:
            HXa = Hm( Xa )
    #
    if selfA._toStore("APosterioriCovariance") or \
        selfA._toStore("SimulationQuantiles") or \
        selfA._toStore("JacobianMatrixAtOptimum") or \
        selfA._toStore("KalmanGainAtOptimum"):
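        # Tangent linear operator at the optimum, as an explicit (Y.size x Xa.size) matrix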
        HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
        HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
    if selfA._toStore("APosterioriCovariance") or \
        selfA._toStore("SimulationQuantiles") or \
        selfA._toStore("KalmanGainAtOptimum"):
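        # Adjoint operator at the optimum, as an explicit (Xa.size x Y.size) matrix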
        HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
        HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
    if selfA._toStore("APosterioriCovariance") or \
        selfA._toStore("SimulationQuantiles"):
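        # A posteriori covariance estimated from the (approximate) Hessian of the cost function, classically (B^-1 + H^T R^-1 H)^-1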
        BI = B.getI()
        A = HessienneEstimation(Xa.size, HaM, HtM, BI, RI)
    if selfA._toStore("APosterioriCovariance"):
        selfA.StoredVariables["APosterioriCovariance"].store( A )
    if selfA._toStore("JacobianMatrixAtOptimum"):
        selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
    if selfA._toStore("KalmanGainAtOptimum"):
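        # Two algebraically equivalent forms of the gain: invert a matrix in observation space when Y.size <= Xb.size, in state space otherwise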
        if   (Y.size <= Xb.size): KG = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
        elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
        selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
    #
    # Additional calculations and/or storage
    # --------------------------------------
    if selfA._toStore("Innovation") or \
        selfA._toStore("SigmaObs2") or \
        selfA._toStore("MahalanobisConsistency") or \
        selfA._toStore("OMB"):
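        # Innovation vector d = y - H(xb)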
        d  = Y - HXb
    if selfA._toStore("Innovation"):
        selfA.StoredVariables["Innovation"].store( d )
    if selfA._toStore("BMA"):
        selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
    if selfA._toStore("OMA"):
        selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
    if selfA._toStore("OMB"):
        selfA.StoredVariables["OMB"].store( d )
    if selfA._toStore("SigmaObs2"):
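        # Observation-error variance indicator: (y - H(xb))^T (y - H(xa)) / Tr(R)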
        TraceR = R.trace(Y.size)
        selfA.StoredVariables["SigmaObs2"].store( float( (d.T @ (numpy.ravel(Y)-numpy.ravel(HXa))) ) / TraceR )
    if selfA._toStore("MahalanobisConsistency"):
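        # Chi-square type consistency check: 2*J(xa)/d.size is expected to be close to 1 for consistent error covariances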
        selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
    if selfA._toStore("SimulationQuantiles"):
        QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
    if selfA._toStore("SimulatedObservationAtBackground"):
        selfA.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
    if selfA._toStore("SimulatedObservationAtOptimum"):
        selfA.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
    #
    return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')