Fixing iterating observation use (4)
modules/adao.git: src/daComposant/daCore/NumericObjects.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 __doc__ = """
24     Defines the generic numerical objects.
25 """
26 __author__ = "Jean-Philippe ARGAUD"
27
28 import os, time, copy, types, sys, logging
29 import math, numpy, scipy, scipy.optimize, scipy.version
30 from daCore.BasicObjects import Operator
31 from daCore.PlatformInfo import PlatformInfo
32 mpr = PlatformInfo().MachinePrecision()
33 mfp = PlatformInfo().MaximumPrecision()
34 # logging.getLogger().setLevel(logging.DEBUG)
35
36 # ==============================================================================
37 def ExecuteFunction( triplet ):
38     assert len(triplet) == 3, "Incorrect number of arguments"
39     X, xArgs, funcrepr = triplet
40     __X = numpy.asmatrix(numpy.ravel( X )).T
41     __sys_path_tmp = sys.path ; sys.path.insert(0,funcrepr["__userFunction__path"])
42     __module = __import__(funcrepr["__userFunction__modl"], globals(), locals(), [])
43     __fonction = getattr(__module,funcrepr["__userFunction__name"])
44     sys.path = __sys_path_tmp ; del __sys_path_tmp
45     if isinstance(xArgs, dict):
46         __HX  = __fonction( __X, **xArgs )
47     else:
48         __HX  = __fonction( __X )
49     return numpy.ravel( __HX )
50
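# Illustrative sketch (not executed): ExecuteFunction rebuilds the user function
# from a small serialisable description so that it can be evaluated inside a
# multiprocessing worker. The path, module and function names below are hypothetical.
#
#   funcrepr = {
#       "__userFunction__path" : "/path/to/user/scripts",      # hypothetical path
#       "__userFunction__modl" : "MyPhysicalModel",             # hypothetical module
#       "__userFunction__name" : "DirectOperator",              # hypothetical function
#   }
#   X  = numpy.array([1., 2., 3.])
#   HX = ExecuteFunction( (X, None, funcrepr) )   # returns numpy.ravel( H(X) )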
51 # ==============================================================================
52 class FDApproximation(object):
53     """
54     This class provides an interface to define approximated operators. When
55     an object is created by supplying a function "Function", one obtains an
56     object with the 3 methods "DirectOperator", "TangentOperator" and
57     "AdjointOperator". The finite-difference (FD) approximation is controlled
58     either by the multiplicative increment "increment", defaulting to 1%, or
59     by the fixed increment "dX" which will be multiplied by "increment" (hence
60     in %), and centered FD are used if the boolean "centeredDF" is true.
61     """
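    # Usage sketch (illustration only, assuming a simple user function named
    # "simulator" defined elsewhere, e.g. simulator = lambda x: numpy.ravel(x)**2):
    #
    #   FDA  = FDApproximation( Function = simulator, increment = 0.01, centeredDF = True )
    #   HX   = FDA.DirectOperator( [1., 2., 3.] )                     # H(X)
    #   J    = FDA.TangentMatrix( [1., 2., 3.] )                      # finite-difference Jacobian at X
    #   HtdX = FDA.TangentOperator( ([1., 2., 3.], [0.1, 0., 0.]) )   # J(X) applied to dX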
62     def __init__(self,
63             name                  = "FDApproximation",
64             Function              = None,
65             centeredDF            = False,
66             increment             = 0.01,
67             dX                    = None,
68             extraArguments        = None,
69             avoidingRedundancy    = True,
70             toleranceInRedundancy = 1.e-18,
71             lenghtOfRedundancy    = -1,
72             mpEnabled             = False,
73             mpWorkers             = None,
74             mfEnabled             = False,
75             ):
76         self.__name = str(name)
77         self.__extraArgs = extraArguments
78         if mpEnabled:
79             try:
80                 import multiprocessing
81                 self.__mpEnabled = True
82             except ImportError:
83                 self.__mpEnabled = False
84         else:
85             self.__mpEnabled = False
86         self.__mpWorkers = mpWorkers
87         if self.__mpWorkers is not None and self.__mpWorkers < 1:
88             self.__mpWorkers = None
89         logging.debug("FDA Calculs en multiprocessing : %s (nombre de processus : %s)"%(self.__mpEnabled,self.__mpWorkers))
90         #
91         if mfEnabled:
92             self.__mfEnabled = True
93         else:
94             self.__mfEnabled = False
95         logging.debug("FDA Calculs en multifonctions : %s"%(self.__mfEnabled,))
96         #
97         if avoidingRedundancy:
98             self.__avoidRC = True
99             self.__tolerBP = float(toleranceInRedundancy)
100             self.__lenghtRJ = int(lenghtOfRedundancy)
101             self.__listJPCP = [] # Jacobian Previous Calculated Points
102             self.__listJPCI = [] # Jacobian Previous Calculated Increment
103             self.__listJPCR = [] # Jacobian Previous Calculated Results
104             self.__listJPPN = [] # Jacobian Previous Calculated Point Norms
105             self.__listJPIN = [] # Jacobian Previous Calculated Increment Norms
106         else:
107             self.__avoidRC = False
108         #
109         if self.__mpEnabled:
110             if isinstance(Function,types.FunctionType):
111                 logging.debug("FDA Calculs en multiprocessing : FunctionType")
112                 self.__userFunction__name = Function.__name__
113                 try:
114                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
115                 except:
116                     mod = os.path.abspath(Function.__globals__['__file__'])
117                 if not os.path.isfile(mod):
118                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
119                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
120                 self.__userFunction__path = os.path.dirname(mod)
121                 del mod
122                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
123                 self.__userFunction = self.__userOperator.appliedTo # For the direct computation
124             elif isinstance(Function,types.MethodType):
125                 logging.debug("FDA Calculs en multiprocessing : MethodType")
126                 self.__userFunction__name = Function.__name__
127                 try:
128                     mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
129                 except:
130                     mod = os.path.abspath(Function.__func__.__globals__['__file__'])
131                 if not os.path.isfile(mod):
132                     raise ImportError("No user defined function or method found with the name %s"%(mod,))
133                 self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
134                 self.__userFunction__path = os.path.dirname(mod)
135                 del mod
136                 self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
137                 self.__userFunction = self.__userOperator.appliedTo # For the direct computation
138             else:
139                 raise TypeError("User defined function or method has to be provided for finite differences approximation.")
140         else:
141             self.__userOperator = Operator( name = self.__name, fromMethod = Function, avoidingRedundancy = self.__avoidRC, inputAsMultiFunction = self.__mfEnabled, extraArguments = self.__extraArgs )
142             self.__userFunction = self.__userOperator.appliedTo
143         #
144         self.__centeredDF = bool(centeredDF)
145         if abs(float(increment)) > 1.e-15:
146             self.__increment  = float(increment)
147         else:
148             self.__increment  = 0.01
149         if dX is None:
150             self.__dX     = None
151         else:
152             self.__dX     = numpy.asmatrix(numpy.ravel( dX )).T
153         logging.debug("FDA Reduction des doublons de calcul : %s"%self.__avoidRC)
154         if self.__avoidRC:
155             logging.debug("FDA Tolerance de determination des doublons : %.2e"%self.__tolerBP)
156
157     # ---------------------------------------------------------
158     def __doublon__(self, e, l, n, v=None):
159         __ac, __iac = False, -1
160         for i in range(len(l)-1,-1,-1):
161             if numpy.linalg.norm(e - l[i]) < self.__tolerBP * n[i]:
162                 __ac, __iac = True, i
163                 if v is not None: logging.debug("FDA Cas%s déja calculé, récupération du doublon %i"%(v,__iac))
164                 break
165         return __ac, __iac
166
167     # ---------------------------------------------------------
168     def DirectOperator(self, X, **extraArgs ):
169         """
170         Computation of the direct operator using the supplied function.
171
172         NB: the extraArgs are there to ensure call compatibility, but they
173         must not be passed on to the user function here.
174         """
175         logging.debug("FDA Calcul DirectOperator (explicite)")
176         if self.__mfEnabled:
177             _HX = self.__userFunction( X, argsAsSerie = True )
178         else:
179             _X = numpy.asmatrix(numpy.ravel( X )).T
180             _HX = numpy.ravel(self.__userFunction( _X ))
181         #
182         return _HX
183
184     # ---------------------------------------------------------
185     def TangentMatrix(self, X ):
186         """
187         Computation of the tangent operator as the Jacobian by finite differences,
188         i.e. the gradient of H at X. Directional finite differences are used
189         around the point X. X is a numpy.matrix.
190
191         Centered finite differences (2nd order approximation):
192         1/ For each component i of X, the perturbation dX[i] is added to and
193            subtracted from the component X[i], to build X_plus_dXi and X_moins_dXi,
194            and the responses HX_plus_dXi = H( X_plus_dXi ) and HX_moins_dXi =
195            H( X_moins_dXi ) are computed
196         2/ The differences (HX_plus_dXi-HX_moins_dXi) are taken and divided by
197            the step 2*dXi
198         3/ Each result, component by component, becomes a column of the Jacobian
199
200         Non-centered finite differences (1st order approximation):
201         1/ For each component i of X, the perturbation dX[i] is added to the
202            component X[i] to build X_plus_dXi, and the response
203            HX_plus_dXi = H( X_plus_dXi ) is computed
204         2/ The central value HX = H(X) is computed
205         3/ The differences (HX_plus_dXi-HX) are taken and divided by
206            the step dXi
207         4/ Each result, component by component, becomes a column of the Jacobian
208
209         """
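        # Minimal standalone sketch of the centered scheme on a toy function
        # (independent of this class, assuming only numpy; shown for illustration):
        #
        #   H  = lambda x: numpy.array([x[0]**2, x[0]*x[1]])
        #   X  = numpy.array([1., 2.]) ; dX = 0.01 * X
        #   J  = numpy.empty((2, 2))
        #   for i in range(2):
        #       Xp, Xm = X.copy(), X.copy()
        #       Xp[i] += dX[i] ; Xm[i] -= dX[i]
        #       J[:, i] = (H(Xp) - H(Xm)) / (2. * dX[i])
        #   # J approximates [[2*x0, 0.], [x1, x0]] evaluated at X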
210         logging.debug("FDA Début du calcul de la Jacobienne")
211         logging.debug("FDA   Incrément de............: %s*X"%float(self.__increment))
212         logging.debug("FDA   Approximation centrée...: %s"%(self.__centeredDF))
213         #
214         if X is None or len(X)==0:
215             raise ValueError("Nominal point X for approximate derivatives can not be None or void (given X: %s)."%(str(X),))
216         #
217         _X = numpy.asmatrix(numpy.ravel( X )).T
218         #
219         if self.__dX is None:
220             _dX  = self.__increment * _X
221         else:
222             _dX = numpy.asmatrix(numpy.ravel( self.__dX )).T
223         #
224         if (_dX == 0.).any():
225             moyenne = _dX.mean()
226             if moyenne == 0.:
227                 _dX = numpy.where( _dX == 0., float(self.__increment), _dX )
228             else:
229                 _dX = numpy.where( _dX == 0., moyenne, _dX )
230         #
231         __alreadyCalculated  = False
232         if self.__avoidRC:
233             __bidon, __alreadyCalculatedP = self.__doublon__(_X,  self.__listJPCP, self.__listJPPN, None)
234             __bidon, __alreadyCalculatedI = self.__doublon__(_dX, self.__listJPCI, self.__listJPIN, None)
235             if __alreadyCalculatedP == __alreadyCalculatedI > -1:
236                 __alreadyCalculated, __i = True, __alreadyCalculatedP
237                 logging.debug("FDA Cas J déja calculé, récupération du doublon %i"%__i)
238         #
239         if __alreadyCalculated:
240             logging.debug("FDA   Calcul Jacobienne (par récupération du doublon %i)"%__i)
241             _Jacobienne = self.__listJPCR[__i]
242         else:
243             logging.debug("FDA   Calcul Jacobienne (explicite)")
244             if self.__centeredDF:
245                 #
246                 if self.__mpEnabled and not self.__mfEnabled:
247                     funcrepr = {
248                         "__userFunction__path" : self.__userFunction__path,
249                         "__userFunction__modl" : self.__userFunction__modl,
250                         "__userFunction__name" : self.__userFunction__name,
251                     }
252                     _jobs = []
253                     for i in range( len(_dX) ):
254                         _dXi            = _dX[i]
255                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
256                         _X_plus_dXi[i]  = _X[i] + _dXi
257                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
258                         _X_moins_dXi[i] = _X[i] - _dXi
259                         #
260                         _jobs.append( (_X_plus_dXi,  self.__extraArgs, funcrepr) )
261                         _jobs.append( (_X_moins_dXi, self.__extraArgs, funcrepr) )
262                     #
263                     import multiprocessing
264                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
265                     _HX_plusmoins_dX = self.__pool.map( ExecuteFunction, _jobs )
266                     self.__pool.close()
267                     self.__pool.join()
268                     #
269                     _Jacobienne  = []
270                     for i in range( len(_dX) ):
271                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
272                     #
273                 elif self.__mfEnabled:
274                     _xserie = []
275                     for i in range( len(_dX) ):
276                         _dXi            = _dX[i]
277                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
278                         _X_plus_dXi[i]  = _X[i] + _dXi
279                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
280                         _X_moins_dXi[i] = _X[i] - _dXi
281                         #
282                         _xserie.append( _X_plus_dXi )
283                         _xserie.append( _X_moins_dXi )
284                     #
285                     _HX_plusmoins_dX = self.DirectOperator( _xserie )
286                     #
287                     _Jacobienne  = []
288                     for i in range( len(_dX) ):
289                         _Jacobienne.append( numpy.ravel( _HX_plusmoins_dX[2*i] - _HX_plusmoins_dX[2*i+1] ) / (2.*_dX[i]) )
290                     #
291                 else:
292                     _Jacobienne  = []
293                     for i in range( _dX.size ):
294                         _dXi            = _dX[i]
295                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
296                         _X_plus_dXi[i]  = _X[i] + _dXi
297                         _X_moins_dXi    = numpy.array( _X.A1, dtype=float )
298                         _X_moins_dXi[i] = _X[i] - _dXi
299                         #
300                         _HX_plus_dXi    = self.DirectOperator( _X_plus_dXi )
301                         _HX_moins_dXi   = self.DirectOperator( _X_moins_dXi )
302                         #
303                         _Jacobienne.append( numpy.ravel( _HX_plus_dXi - _HX_moins_dXi ) / (2.*_dXi) )
304                 #
305             else:
306                 #
307                 if self.__mpEnabled and not self.__mfEnabled:
308                     funcrepr = {
309                         "__userFunction__path" : self.__userFunction__path,
310                         "__userFunction__modl" : self.__userFunction__modl,
311                         "__userFunction__name" : self.__userFunction__name,
312                     }
313                     _jobs = []
314                     _jobs.append( (_X.A1, self.__extraArgs, funcrepr) )
315                     for i in range( len(_dX) ):
316                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
317                         _X_plus_dXi[i] = _X[i] + _dX[i]
318                         #
319                         _jobs.append( (_X_plus_dXi, self.__extraArgs, funcrepr) )
320                     #
321                     import multiprocessing
322                     self.__pool = multiprocessing.Pool(self.__mpWorkers)
323                     _HX_plus_dX = self.__pool.map( ExecuteFunction, _jobs )
324                     self.__pool.close()
325                     self.__pool.join()
326                     #
327                     _HX = _HX_plus_dX.pop(0)
328                     #
329                     _Jacobienne = []
330                     for i in range( len(_dX) ):
331                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
332                     #
333                 elif self.__mfEnabled:
334                     _xserie = []
335                     _xserie.append( _X.A1 )
336                     for i in range( len(_dX) ):
337                         _X_plus_dXi    = numpy.array( _X.A1, dtype=float )
338                         _X_plus_dXi[i] = _X[i] + _dX[i]
339                         #
340                         _xserie.append( _X_plus_dXi )
341                     #
342                     _HX_plus_dX = self.DirectOperator( _xserie )
343                     #
344                     _HX = _HX_plus_dX.pop(0)
345                     #
346                     _Jacobienne = []
347                     for i in range( len(_dX) ):
348                         _Jacobienne.append( numpy.ravel(( _HX_plus_dX[i] - _HX ) / _dX[i]) )
349                     #
350                 else:
351                     _Jacobienne  = []
352                     _HX = self.DirectOperator( _X )
353                     for i in range( _dX.size ):
354                         _dXi            = _dX[i]
355                         _X_plus_dXi     = numpy.array( _X.A1, dtype=float )
356                         _X_plus_dXi[i]  = _X[i] + _dXi
357                         #
358                         _HX_plus_dXi = self.DirectOperator( _X_plus_dXi )
359                         #
360                         _Jacobienne.append( numpy.ravel(( _HX_plus_dXi - _HX ) / _dXi) )
361                 #
362             #
363             _Jacobienne = numpy.asmatrix( numpy.vstack( _Jacobienne ) ).T
364             if self.__avoidRC:
365                 if self.__lenghtRJ < 0: self.__lenghtRJ = 2 * _X.size
366                 while len(self.__listJPCP) > self.__lenghtRJ:
367                     self.__listJPCP.pop(0)
368                     self.__listJPCI.pop(0)
369                     self.__listJPCR.pop(0)
370                     self.__listJPPN.pop(0)
371                     self.__listJPIN.pop(0)
372                 self.__listJPCP.append( copy.copy(_X) )
373                 self.__listJPCI.append( copy.copy(_dX) )
374                 self.__listJPCR.append( copy.copy(_Jacobienne) )
375                 self.__listJPPN.append( numpy.linalg.norm(_X) )
376                 self.__listJPIN.append( numpy.linalg.norm(_Jacobienne) )
377         #
378         logging.debug("FDA Fin du calcul de la Jacobienne")
379         #
380         return _Jacobienne
381
382     # ---------------------------------------------------------
383     def TangentOperator(self, paire, **extraArgs ):
384         """
385         Computation of the tangent operator using the Jacobian.
386
387         NB: the extraArgs are there to ensure call compatibility, but they
388         must not be passed on to the user function here.
389         """
390         if self.__mfEnabled:
391             assert len(paire) == 1, "Incorrect length of arguments"
392             _paire = paire[0]
393             assert len(_paire) == 2, "Incorrect number of arguments"
394         else:
395             assert len(paire) == 2, "Incorrect number of arguments"
396             _paire = paire
397         X, dX = _paire
398         _Jacobienne = self.TangentMatrix( X )
399         if dX is None or len(dX) == 0:
400             #
401             # Computation of the matrix form if the second argument is None
402             # -------------------------------------------------------------
403             if self.__mfEnabled: return [_Jacobienne,]
404             else:                return _Jacobienne
405         else:
406             #
407             # Computation of the linearised value of H at X applied to dX
408             # ------------------------------------------------------
409             _dX = numpy.asmatrix(numpy.ravel( dX )).T
410             _HtX = numpy.dot(_Jacobienne, _dX)
411             if self.__mfEnabled: return [_HtX.A1,]
412             else:                return _HtX.A1
413
414     # ---------------------------------------------------------
415     def AdjointOperator(self, paire, **extraArgs ):
416         """
417         Computation of the adjoint operator using the Jacobian.
418
419         NB: the extraArgs are there to ensure call compatibility, but they
420         must not be passed on to the user function here.
421         """
422         if self.__mfEnabled:
423             assert len(paire) == 1, "Incorrect length of arguments"
424             _paire = paire[0]
425             assert len(_paire) == 2, "Incorrect number of arguments"
426         else:
427             assert len(paire) == 2, "Incorrect number of arguments"
428             _paire = paire
429         X, Y = _paire
430         _JacobienneT = self.TangentMatrix( X ).T
431         if Y is None or len(Y) == 0:
432             #
433             # Computation of the matrix form if the second argument is None
434             # -------------------------------------------------------------
435             if self.__mfEnabled: return [_JacobienneT,]
436             else:                return _JacobienneT
437         else:
438             #
439             # Computation of the adjoint value at X applied to Y
440             # --------------------------------------------------
441             _Y = numpy.asmatrix(numpy.ravel( Y )).T
442             _HaY = numpy.dot(_JacobienneT, _Y)
443             if self.__mfEnabled: return [_HaY.A1,]
444             else:                return _HaY.A1
445
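# Consistency sketch (assuming an "FDA" object built as in the class usage
# sketch above, and vectors X, dX, Y of compatible sizes): the tangent and
# adjoint operators obtained from the same Jacobian satisfy, up to rounding,
# the duality relation < J(X) dX, Y > = < dX, J(X)^T Y >, e.g.:
#
#   TdX = numpy.ravel( FDA.TangentOperator( (X, dX) ) )   # J(X) applied to dX
#   AY  = numpy.ravel( FDA.AdjointOperator( (X, Y) ) )    # J(X)^T applied to Y
#   assert abs( numpy.dot(TdX, numpy.ravel(Y)) - numpy.dot(numpy.ravel(dX), AY) ) < 1.e-8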
446 # ==============================================================================
447 def EnsembleOfCenteredPerturbations( _bgcenter, _bgcovariance, _nbmembers ):
448     "Génération d'un ensemble de taille _nbmembers-1 d'états aléatoires centrés"
449     #
450     _bgcenter = numpy.ravel(_bgcenter)[:,None]
451     if _nbmembers < 1:
452         raise ValueError("Number of members has to be at least 1 (given number: %s)."%(str(_nbmembers),))
453     #
454     if _bgcovariance is None:
455         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
456     else:
457         _Z = numpy.random.multivariate_normal(numpy.zeros(_bgcenter.size), _bgcovariance, size=_nbmembers).T
458         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers) + _Z
459     #
460     return BackgroundEnsemble
461
462 # ==============================================================================
463 def EnsembleOfBackgroundPerturbations( _bgcenter, _bgcovariance, _nbmembers, _withSVD = True):
464     "Generation of an ensemble of size _nbmembers-1 of centered random states"
465     def __CenteredRandomAnomalies(Zr, N):
466         """
467         Generates a matrix of N random anomalies centered on Zr, following the
468         handwritten notes of MB and consistent with the code of PS, with eps = -1
469         """
470         eps = -1
471         Q = numpy.identity(N-1)-numpy.ones((N-1,N-1))/numpy.sqrt(N)/(numpy.sqrt(N)-eps)
472         Q = numpy.concatenate((Q, [eps*numpy.ones(N-1)/numpy.sqrt(N)]), axis=0)
473         R, _ = numpy.linalg.qr(numpy.random.normal(size = (N-1,N-1)))
474         Q = numpy.dot(Q,R)
475         Zr = numpy.dot(Q,Zr)
476         return Zr.T
477     #
478     _bgcenter = numpy.ravel(_bgcenter).reshape((-1,1))
479     if _nbmembers < 1:
480         raise ValueError("Number of members has to be at least 1 (given number: %s)."%(str(_nbmembers),))
481     if _bgcovariance is None:
482         BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
483     else:
484         if _withSVD:
485             U, s, V = numpy.linalg.svd(_bgcovariance, full_matrices=False)
486             _nbctl = _bgcenter.size
487             if _nbmembers > _nbctl:
488                 _Z = numpy.concatenate((numpy.dot(
489                     numpy.diag(numpy.sqrt(s[:_nbctl])), V[:_nbctl]),
490                     numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1-_nbctl)), axis = 0)
491             else:
492                 _Z = numpy.dot(numpy.diag(numpy.sqrt(s[:_nbmembers-1])), V[:_nbmembers-1])
493             _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
494             BackgroundEnsemble = _bgcenter + _Zca
495         else:
496             if max(abs(_bgcovariance.flatten())) > 0:
497                 _nbctl = _bgcenter.size
498                 _Z = numpy.random.multivariate_normal(numpy.zeros(_nbctl),_bgcovariance,_nbmembers-1)
499                 _Zca = __CenteredRandomAnomalies(_Z, _nbmembers)
500                 BackgroundEnsemble = _bgcenter + _Zca
501             else:
502                 BackgroundEnsemble = numpy.tile( _bgcenter, _nbmembers)
503     #
504     return BackgroundEnsemble
505
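# Property sketch (assuming only numpy, for illustration): the anomalies built
# by __CenteredRandomAnomalies sum to zero over the members, so the empirical
# mean of the generated ensemble stays on the background center, e.g.:
#
#   Xb = numpy.array([0., 1., 2.]) ; B = numpy.eye(3) ; m = 5
#   E  = EnsembleOfBackgroundPerturbations( Xb, B, m )
#   assert E.shape == (3, m)
#   assert numpy.allclose( E.mean(axis=1), Xb )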
506 # ==============================================================================
507 def EnsembleMean( __Ensemble ):
508     "Returns the empirical mean of an ensemble"
509     return numpy.asarray(__Ensemble).mean(axis=1, dtype=mfp).astype('float').reshape((-1,1))
510
511 # ==============================================================================
512 def EnsembleOfAnomalies( Ensemble, OptMean = None, Normalisation = 1.):
513     "Returns the centered anomalies of an ensemble"
514     if OptMean is None:
515         __Em = EnsembleMean( Ensemble )
516     else:
517         __Em = numpy.ravel(OptMean).reshape((-1,1))
518     #
519     return Normalisation * (numpy.asarray(Ensemble) - __Em)
520
521 # ==============================================================================
522 def EnsembleErrorCovariance( Ensemble, __quick = False ):
523     "Returns the empirical estimate of the ensemble covariance"
524     if __quick:
525         # Fast covariance estimate, but rarely positive definite
526         __Covariance = numpy.cov(Ensemble)
527     else:
528         # Result often identical to numpy.cov, but more robust
529         __n, __m = numpy.asarray(Ensemble).shape
530         __Anomalies = EnsembleOfAnomalies( Ensemble )
531         # Empirical estimate
532         __Covariance = (__Anomalies @ __Anomalies.T) / (__m-1)
533         # Ensures symmetry
534         __Covariance = (__Covariance + __Covariance.T) * 0.5
535         # Ensures positivity
536         __epsilon    = mpr*numpy.trace(__Covariance)
537         __Covariance = __Covariance + __epsilon * numpy.identity(__n)
538     #
539     return __Covariance
540
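# Worked sketch (assuming only numpy, for illustration): for a small ensemble,
# the robust estimate above reduces to the usual anomaly-based formula, up to
# the tiny symmetrisation and positivity corrections:
#
#   E = numpy.array([[1., 2., 3.], [2., 2., 2.], [0., 1., 5.]])   # 3 states, 3 members
#   A = EnsembleOfAnomalies( E )                                  # E minus its member mean
#   P = EnsembleErrorCovariance( E )
#   assert numpy.allclose( P, (A @ A.T) / (E.shape[1] - 1) )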
541 # ==============================================================================
542 def EnsemblePerturbationWithGivenCovariance( __Ensemble, __Covariance, __Seed=None ):
543     "Adds a perturbation to each member of an ensemble according to a prescribed covariance"
544     if hasattr(__Covariance,"assparsematrix"):
545         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance.assparsematrix())/abs(__Ensemble).mean() < mpr).all():
546             # Handling of a zero or nearly zero covariance
547             return __Ensemble
548         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance.assparsematrix()) < mpr).all():
549             # Handling of a zero or nearly zero covariance
550             return __Ensemble
551     else:
552         if (abs(__Ensemble).mean() > mpr) and (abs(__Covariance)/abs(__Ensemble).mean() < mpr).all():
553             # Handling of a zero or nearly zero covariance
554             return __Ensemble
555         if (abs(__Ensemble).mean() <= mpr) and (abs(__Covariance) < mpr).all():
556             # Handling of a zero or nearly zero covariance
557             return __Ensemble
558     #
559     __n, __m = __Ensemble.shape
560     if __Seed is not None: numpy.random.seed(__Seed)
561     #
562     if hasattr(__Covariance,"isscalar") and __Covariance.isscalar():
563         # Handling of a covariance proportional to the identity
564         __zero = 0.
565         __std  = numpy.sqrt(__Covariance.assparsematrix())
566         __Ensemble += numpy.random.normal(__zero, __std, size=(__m,__n)).T
567     #
568     elif hasattr(__Covariance,"isvector") and __Covariance.isvector():
569         # Handling of a diagonal covariance with non-identical variances
570         __zero = numpy.zeros(__n)
571         __std  = numpy.sqrt(__Covariance.assparsematrix())
572         __Ensemble += numpy.asarray([numpy.random.normal(__zero, __std) for i in range(__m)]).T
573     #
574     elif hasattr(__Covariance,"ismatrix") and __Covariance.ismatrix():
575         # Handling of a full covariance
576         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance.asfullmatrix(__n), size=__m).T
577     #
578     elif isinstance(__Covariance, numpy.ndarray):
579         # Handling of a full numpy covariance, knowing that this case is reached last
580         __Ensemble += numpy.random.multivariate_normal(numpy.zeros(__n), __Covariance, size=__m).T
581     #
582     else:
583         raise ValueError("Error in ensemble perturbation with inadequate covariance specification")
584     #
585     return __Ensemble
586
587 # ==============================================================================
588 def CovarianceInflation(
589         InputCovOrEns,
590         InflationType   = None,
591         InflationFactor = None,
592         BackgroundCov   = None,
593         ):
594     """
595     Inflation applicable either to Pb or Pa, or to the ensembles EXb or EXa
596
597     Summary: Hunt 2007, section 2.3.5
598     """
599     if InflationFactor is None:
600         return InputCovOrEns
601     else:
602         InflationFactor = float(InflationFactor)
603     #
604     if InflationType in ["MultiplicativeOnAnalysisCovariance", "MultiplicativeOnBackgroundCovariance"]:
605         if InflationFactor < 1.:
606             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
607         if InflationFactor < 1.+mpr:
608             return InputCovOrEns
609         OutputCovOrEns = InflationFactor**2 * InputCovOrEns
610     #
611     elif InflationType in ["MultiplicativeOnAnalysisAnomalies", "MultiplicativeOnBackgroundAnomalies"]:
612         if InflationFactor < 1.:
613             raise ValueError("Inflation factor for multiplicative inflation has to be greater than or equal to 1.")
614         if InflationFactor < 1.+mpr:
615             return InputCovOrEns
616         InputCovOrEnsMean = InputCovOrEns.mean(axis=1, dtype=mfp).astype('float')
617         OutputCovOrEns = InputCovOrEnsMean[:,numpy.newaxis] \
618             + InflationFactor * (InputCovOrEns - InputCovOrEnsMean[:,numpy.newaxis])
619     #
620     elif InflationType in ["AdditiveOnAnalysisCovariance", "AdditiveOnBackgroundCovariance"]:
621         if InflationFactor < 0.:
622             raise ValueError("Inflation factor for additive inflation has to be greater than or equal to 0.")
623         if InflationFactor < mpr:
624             return InputCovOrEns
625         __n, __m = numpy.asarray(InputCovOrEns).shape
626         if __n != __m:
627             raise ValueError("Additive inflation can only be applied to a square (covariance) matrix.")
628         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * numpy.identity(__n)
629     #
630     elif InflationType == "HybridOnBackgroundCovariance":
631         if InflationFactor < 0.:
632             raise ValueError("Inflation factor for hybrid inflation has to be greater than or equal to 0.")
633         if InflationFactor < mpr:
634             return InputCovOrEns
635         __n, __m = numpy.asarray(InputCovOrEns).shape
636         if __n != __m:
637             raise ValueError("Hybrid inflation can only be applied to a square (covariance) matrix.")
638         if BackgroundCov is None:
639             raise ValueError("Background covariance matrix B has to be given for hybrid inflation.")
640         if InputCovOrEns.shape != BackgroundCov.shape:
641             raise ValueError("Ensemble covariance matrix has to be of the same size as the background covariance matrix B.")
642         OutputCovOrEns = (1. - InflationFactor) * InputCovOrEns + InflationFactor * BackgroundCov
643     #
644     elif InflationType == "Relaxation":
645         raise NotImplementedError("InflationType Relaxation")
646     #
647     else:
648         raise ValueError("Error in inflation type, '%s' is not a valid keyword."%InflationType)
649     #
650     return OutputCovOrEns
651
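# Worked sketch (assuming only numpy, for illustration): multiplicative
# inflation of the anomalies rescales the spread around the ensemble mean
# without moving the mean itself:
#
#   E  = numpy.array([[1., 3.], [0., 2.]])        # 2 states, 2 members, mean [2., 1.]
#   Ei = CovarianceInflation( E, "MultiplicativeOnAnalysisAnomalies", 1.5 )
#   assert numpy.allclose( Ei, numpy.array([[0.5, 3.5], [-0.5, 2.5]]) )
#   assert numpy.allclose( Ei.mean(axis=1), E.mean(axis=1) )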
652 # ==============================================================================
653 def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
654     "A posteriori quantiles estimation (selfA is modified)"
655     nbsamples = selfA._parameters["NumberOfSamplesForQuantiles"]
656     #
657     # Handling of the bounds
658     if "StateBoundsForQuantiles" in selfA._parameters:
659         LBounds = selfA._parameters["StateBoundsForQuantiles"] # Takes priority
660     elif "Bounds" in selfA._parameters:
661         LBounds = selfA._parameters["Bounds"]  # Reasonable default
662     else:
663         LBounds = None
664     if LBounds is not None:
665         def NoneRemove(paire):
666             bmin, bmax = paire
667             if bmin is None: bmin = numpy.finfo('float').min
668             if bmax is None: bmax = numpy.finfo('float').max
669             return [bmin, bmax]
670         LBounds = numpy.matrix( [NoneRemove(paire) for paire in LBounds] )
671     #
672     # Sampling of the states
673     YfQ  = None
674     EXr  = None
675     if selfA._parameters["SimulationForQuantiles"] == "Linear" and HXa is not None:
676         HXa  = numpy.matrix(numpy.ravel( HXa )).T
677     for i in range(nbsamples):
678         if selfA._parameters["SimulationForQuantiles"] == "Linear" and HtM is not None:
679             dXr = numpy.matrix(numpy.random.multivariate_normal(numpy.ravel(Xa),A) - numpy.ravel(Xa)).T
680             if LBounds is not None: # "EstimateProjection" by default
681                 dXr = numpy.max(numpy.hstack((dXr,LBounds[:,0] - Xa)),axis=1)
682                 dXr = numpy.min(numpy.hstack((dXr,LBounds[:,1] - Xa)),axis=1)
683             dYr = numpy.matrix(numpy.ravel( HtM * dXr )).T
684             Yr = HXa + dYr
685             if selfA._toStore("SampledStateForQuantiles"): Xr = Xa + dXr
686         elif selfA._parameters["SimulationForQuantiles"] == "NonLinear" and Hm is not None:
687             Xr = numpy.matrix(numpy.random.multivariate_normal(numpy.ravel(Xa),A)).T
688             if LBounds is not None: # "EstimateProjection" by default
689                 Xr = numpy.max(numpy.hstack((Xr,LBounds[:,0])),axis=1)
690                 Xr = numpy.min(numpy.hstack((Xr,LBounds[:,1])),axis=1)
691             Yr = numpy.matrix(numpy.ravel( Hm( Xr ) )).T
692         else:
693             raise ValueError("Quantile simulation type has to be Linear or NonLinear.")
694         #
695         if YfQ is None:
696             YfQ = Yr
697             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.ravel(Xr)
698         else:
699             YfQ = numpy.hstack((YfQ,Yr))
700             if selfA._toStore("SampledStateForQuantiles"): EXr = numpy.vstack((EXr,numpy.ravel(Xr)))
701     #
702     # Extraction of the quantiles
703     YfQ.sort(axis=-1)
704     YQ = None
705     for quantile in selfA._parameters["Quantiles"]:
706         if not (0. <= float(quantile) <= 1.): continue
707         indice = int(nbsamples * float(quantile) - 1./nbsamples)
708         if YQ is None: YQ = YfQ[:,indice]
709         else:          YQ = numpy.hstack((YQ,YfQ[:,indice]))
710     selfA.StoredVariables["SimulationQuantiles"].store( YQ )
711     if selfA._toStore("SampledStateForQuantiles"):
712         selfA.StoredVariables["SampledStateForQuantiles"].store( EXr.T )
713     #
714     return 0
715
716 # ==============================================================================
717 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
718     """
719     EnKS
720     """
721     #
722     # Operators
723     H = HO["Direct"].appliedControledFormTo
724     #
725     if selfA._parameters["EstimationOf"] == "State":
726         M = EM["Direct"].appliedControledFormTo
727     #
728     if CM is not None and "Tangent" in CM and U is not None:
729         Cm = CM["Tangent"].asMatrix(Xb)
730     else:
731         Cm = None
732     #
733     # Precomputation of the B and R inverses
734     RIdemi = R.sqrtmI()
735     #
736     # Observation duration and sizes
737     LagL = selfA._parameters["SmootherLagL"]
738     if (not hasattr(Y,"store")) or (not hasattr(Y,"stepnumber")):
739         raise ValueError("Fixed-lag smoother requires a series of observations")
740     if Y.stepnumber() < LagL:
741         raise ValueError("Fixed-lag smoother requires a series of observations longer than the lag L")
742     duration = Y.stepnumber()
743     __p = numpy.cumprod(Y.shape())[-1]
744     __n = Xb.size
745     __m = selfA._parameters["NumberOfMembers"]
746     #
747     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
748         selfA.StoredVariables["Analysis"].store( Xb )
749         if selfA._toStore("APosterioriCovariance"):
750             if hasattr(B,"asfullmatrix"):
751                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
752             else:
753                 selfA.StoredVariables["APosterioriCovariance"].store( B )
754     #
755     # Initial direct computation (storage is preferred over recomputation)
756     __seed = numpy.random.get_state()
757     selfB = copy.deepcopy(selfA)
758     selfB._parameters["StoreSupplementaryCalculations"] = ["CurrentEnsembleState"]
759     if VariantM == "EnKS16-KalmanFilterFormula":
760         etkf(selfB, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM = "KalmanFilterFormula")
761     else:
762         raise ValueError("VariantM has to be chosen in the authorized methods list.")
763     if LagL > 0:
764         EL  = selfB.StoredVariables["CurrentEnsembleState"][LagL-1]
765     else:
766         EL = EnsembleOfBackgroundPerturbations( Xb, None, __m ) # Cf. etkf
767     selfA._parameters["SetSeed"] = numpy.random.set_state(__seed)
768     #
769     for step in range(LagL,duration-1):
770         #
771         sEL = selfB.StoredVariables["CurrentEnsembleState"][step+1-LagL:step+1]
772         sEL.append(None)
773         #
774         if hasattr(Y,"store"):
775             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
776         else:
777             Ynpu = numpy.ravel( Y ).reshape((__p,1))
778         #
779         if U is not None:
780             if hasattr(U,"store") and len(U)>1:
781                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
782             elif hasattr(U,"store") and len(U)==1:
783                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
784             else:
785                 Un = numpy.asmatrix(numpy.ravel( U )).T
786         else:
787             Un = None
788         #
789         #--------------------------
790         if VariantM == "EnKS16-KalmanFilterFormula":
791             if selfA._parameters["EstimationOf"] == "State": # Forecast
792                 EL = M( [(EL[:,i], Un) for i in range(__m)],
793                     argsAsSerie = True,
794                     returnSerieAsArrayMatrix = True )
795                 EL = EnsemblePerturbationWithGivenCovariance( EL, Q )
796                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
797                     argsAsSerie = True,
798                     returnSerieAsArrayMatrix = True )
799                 if Cm is not None and Un is not None: # Warning: if Cm is also in M, this is a duplication!
800                     Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
801                     EZ = EZ + Cm * Un
802             elif selfA._parameters["EstimationOf"] == "Parameters":
803                 # --- > By principle, M = Id, Q = 0
804                 EZ = H( [(EL[:,i], Un) for i in range(__m)],
805                     argsAsSerie = True,
806                     returnSerieAsArrayMatrix = True )
807             #
808             vEm   = EL.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
809             vZm   = EZ.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
810             #
811             mS    = RIdemi @ EnsembleOfAnomalies( EZ, vZm, 1./math.sqrt(__m-1) )
812             mS    = mS.reshape((-1,__m)) # Pour dimension 1
813             delta = RIdemi @ ( Ynpu - vZm )
814             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
815             vw    = mT @ mS.T @ delta
816             #
817             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
818             mU    = numpy.identity(__m)
819             wTU   = (vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU)
820             #
821             EX    = EnsembleOfAnomalies( EL, vEm, 1./math.sqrt(__m-1) )
822             EL    = vEm + EX @ wTU
823             #
824             sEL[LagL] = EL
825             for irl in range(LagL): # Smoothing of the previous L analyses
826                 vEm = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
827                 EX = EnsembleOfAnomalies( sEL[irl], vEm, 1./math.sqrt(__m-1) )
828                 sEL[irl] = vEm + EX @ wTU
829             #
830             # Preservation of the order-0 retrospective analysis before rotation
831             Xa = sEL[0].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
832             if selfA._toStore("APosterioriCovariance"):
833                 EXn = sEL[0]
834             #
835             for irl in range(LagL):
836                 sEL[irl] = sEL[irl+1]
837             sEL[LagL] = None
838         #--------------------------
839         else:
840             raise ValueError("VariantM has to be chosen in the authorized methods list.")
841         #
842         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
843         # ---> with analysis
844         selfA.StoredVariables["Analysis"].store( Xa )
845         if selfA._toStore("APosterioriCovariance"):
846             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(EXn) )
847     #
848     # Storage of the last, not fully updated, analyses
849     for irl in range(LagL):
850         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
851         Xa = sEL[irl].mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
852         selfA.StoredVariables["Analysis"].store( Xa )
853     #
854     return 0
855
856 # ==============================================================================
857 def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
858     """
859     Ensemble-Transform EnKF
860     """
861     if selfA._parameters["EstimationOf"] == "Parameters":
862         selfA._parameters["StoreInternalVariables"] = True
863     #
864     # Operators
865     H = HO["Direct"].appliedControledFormTo
866     #
867     if selfA._parameters["EstimationOf"] == "State":
868         M = EM["Direct"].appliedControledFormTo
869     #
870     if CM is not None and "Tangent" in CM and U is not None:
871         Cm = CM["Tangent"].asMatrix(Xb)
872     else:
873         Cm = None
874     #
875     # Observation duration and sizes
876     if hasattr(Y,"stepnumber"):
877         duration = Y.stepnumber()
878         __p = numpy.cumprod(Y.shape())[-1]
879     else:
880         duration = 2
881         __p = numpy.array(Y).size
882     #
883     # Precomputation of the B and R inverses
884     if selfA._parameters["StoreInternalVariables"] \
885         or selfA._toStore("CostFunctionJ") \
886         or selfA._toStore("CostFunctionJb") \
887         or selfA._toStore("CostFunctionJo") \
888         or selfA._toStore("CurrentOptimum") \
889         or selfA._toStore("APosterioriCovariance"):
890         BI = B.getI()
891         RI = R.getI()
892     elif VariantM != "KalmanFilterFormula":
893         RI = R.getI()
894     if VariantM == "KalmanFilterFormula":
895         RIdemi = R.sqrtmI()
896     #
897     __n = Xb.size
898     __m = selfA._parameters["NumberOfMembers"]
899     #
900     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
901         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
902         selfA.StoredVariables["Analysis"].store( Xb )
903         if selfA._toStore("APosterioriCovariance"):
904             if hasattr(B,"asfullmatrix"):
905                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
906             else:
907                 selfA.StoredVariables["APosterioriCovariance"].store( B )
908         selfA._setInternalState("seed", numpy.random.get_state())
909     elif selfA._parameters["nextStep"]:
910         Xn = selfA._getInternalState("Xn")
911     #
912     previousJMinimum = numpy.finfo(float).max
913     #
914     for step in range(duration-1):
915         numpy.random.set_state(selfA._getInternalState("seed"))
916         if hasattr(Y,"store"):
917             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
918         else:
919             Ynpu = numpy.ravel( Y ).reshape((__p,1))
920         #
921         if U is not None:
922             if hasattr(U,"store") and len(U)>1:
923                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
924             elif hasattr(U,"store") and len(U)==1:
925                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
926             else:
927                 Un = numpy.asmatrix(numpy.ravel( U )).T
928         else:
929             Un = None
930         #
931         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
932             Xn = CovarianceInflation( Xn,
933                 selfA._parameters["InflationType"],
934                 selfA._parameters["InflationFactor"],
935                 )
936         #
937         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
938             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
939                 argsAsSerie = True,
940                 returnSerieAsArrayMatrix = True )
941             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
942             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
943                 argsAsSerie = True,
944                 returnSerieAsArrayMatrix = True )
945             if Cm is not None and Un is not None: # Warning: if Cm is also in M, this is a duplication!
946                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
947                 Xn_predicted = Xn_predicted + Cm * Un
948         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
949             # --- > By principle, M = Id, Q = 0
950             Xn_predicted = EMX = Xn
951             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
952                 argsAsSerie = True,
953                 returnSerieAsArrayMatrix = True )
954         #
955         # Mean of forecast and observation of forecast
956         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
957         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
958         #
959         # Anomalies
960         EaX   = EnsembleOfAnomalies( Xn_predicted, Xfm )
961         EaHX  = EnsembleOfAnomalies( HX_predicted, Hfm)
962         #
963         #--------------------------
964         if VariantM == "KalmanFilterFormula":
965             mS    = RIdemi * EaHX / math.sqrt(__m-1)
966             mS    = mS.reshape((-1,__m)) # Pour dimension 1
967             delta = RIdemi * ( Ynpu - Hfm )
968             mT    = numpy.linalg.inv( numpy.identity(__m) + mS.T @ mS )
969             vw    = mT @ mS.T @ delta
970             #
971             Tdemi = numpy.real(scipy.linalg.sqrtm(mT))
972             mU    = numpy.identity(__m)
973             #
974             EaX   = EaX / math.sqrt(__m-1)
975             Xn    = Xfm + EaX @ ( vw.reshape((__m,1)) + math.sqrt(__m-1) * Tdemi @ mU )
976         #--------------------------
977         elif VariantM == "Variational":
978             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
979             def CostFunction(w):
980                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
981                 _Jo = 0.5 * _A.T @ (RI * _A)
982                 _Jb = 0.5 * (__m-1) * w.T @ w
983                 _J  = _Jo + _Jb
984                 return float(_J)
985             def GradientOfCostFunction(w):
986                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
987                 _GradJo = - EaHX.T @ (RI * _A)
988                 _GradJb = (__m-1) * w.reshape((__m,1))
989                 _GradJ  = _GradJo + _GradJb
990                 return numpy.ravel(_GradJ)
991             vw = scipy.optimize.fmin_cg(
992                 f           = CostFunction,
993                 x0          = numpy.zeros(__m),
994                 fprime      = GradientOfCostFunction,
995                 args        = (),
996                 disp        = False,
997                 )
998             #
999             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1000             Htb = (__m-1) * numpy.identity(__m)
1001             Hta = Hto + Htb
1002             #
1003             Pta = numpy.linalg.inv( Hta )
1004             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1005             #
1006             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1007         #--------------------------
1008         elif VariantM == "FiniteSize11": # Jauge Boc2011
1009             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1010             def CostFunction(w):
1011                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1012                 _Jo = 0.5 * _A.T @ (RI * _A)
1013                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
1014                 _J  = _Jo + _Jb
1015                 return float(_J)
1016             def GradientOfCostFunction(w):
1017                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1018                 _GradJo = - EaHX.T @ (RI * _A)
1019                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1020                 _GradJ  = _GradJo + _GradJb
1021                 return numpy.ravel(_GradJ)
1022             vw = scipy.optimize.fmin_cg(
1023                 f           = CostFunction,
1024                 x0          = numpy.zeros(__m),
1025                 fprime      = GradientOfCostFunction,
1026                 args        = (),
1027                 disp        = False,
1028                 )
1029             #
1030             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1031             Htb = __m * \
1032                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1033                 / (1 + 1/__m + vw.T @ vw)**2
1034             Hta = Hto + Htb
1035             #
1036             Pta = numpy.linalg.inv( Hta )
1037             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1038             #
1039             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1040         #--------------------------
1041         elif VariantM == "FiniteSize15": # Jauge Boc2015
1042             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1043             def CostFunction(w):
1044                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1045                 _Jo = 0.5 * _A.T @ (RI * _A)
1046                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w)
1047                 _J  = _Jo + _Jb
1048                 return float(_J)
1049             def GradientOfCostFunction(w):
1050                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1051                 _GradJo = - EaHX.T @ (RI * _A)
1052                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
1053                 _GradJ  = _GradJo + _GradJb
1054                 return numpy.ravel(_GradJ)
1055             vw = scipy.optimize.fmin_cg(
1056                 f           = CostFunction,
1057                 x0          = numpy.zeros(__m),
1058                 fprime      = GradientOfCostFunction,
1059                 args        = (),
1060                 disp        = False,
1061                 )
1062             #
1063             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1064             Htb = (__m+1) * \
1065                 ( (1 + 1/__m + vw.T @ vw) * numpy.identity(__m) - 2 * vw @ vw.T ) \
1066                 / (1 + 1/__m + vw.T @ vw)**2
1067             Hta = Hto + Htb
1068             #
1069             Pta = numpy.linalg.inv( Hta )
1070             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1071             #
1072             Xn  = Xfm + EaX @ (vw.reshape((__m,1)) + EWa)
1073         #--------------------------
1074         elif VariantM == "FiniteSize16": # Jauge Boc2016
1075             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
1076             def CostFunction(w):
1077                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1078                 _Jo = 0.5 * _A.T @ (RI * _A)
1079                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
1080                 _J  = _Jo + _Jb
1081                 return float(_J)
1082             def GradientOfCostFunction(w):
1083                 _A  = Ynpu - HXfm.reshape((__p,1)) - (EaHX @ w).reshape((__p,1))
1084                 _GradJo = - EaHX.T @ (RI * _A)
1085                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
1086                 _GradJ  = _GradJo + _GradJb
1087                 return numpy.ravel(_GradJ)
1088             vw = scipy.optimize.fmin_cg(
1089                 f           = CostFunction,
1090                 x0          = numpy.zeros(__m),
1091                 fprime      = GradientOfCostFunction,
1092                 args        = (),
1093                 disp        = False,
1094                 )
1095             #
1096             Hto = EaHX.T @ (RI * EaHX).reshape((-1,__m))
1097             Htb = ((__m+1) / (__m-1)) * \
1098                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.identity(__m) - 2 * vw @ vw.T / (__m-1) ) \
1099                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
1100             Hta = Hto + Htb
1101             #
1102             Pta = numpy.linalg.inv( Hta )
1103             EWa = numpy.real(scipy.linalg.sqrtm((__m-1)*Pta)) # Partie imaginaire ~= 10^-18
1104             #
1105             Xn  = Xfm + EaX @ (vw[:,None] + EWa)
1106         #--------------------------
1107         else:
1108             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1109         #
1110         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1111             Xn = CovarianceInflation( Xn,
1112                 selfA._parameters["InflationType"],
1113                 selfA._parameters["InflationFactor"],
1114                 )
1115         #
1116         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1117         #--------------------------
1118         selfA._setInternalState("Xn", Xn)
1119         selfA._setInternalState("seed", numpy.random.get_state())
1120         #--------------------------
1121         #
1122         if selfA._parameters["StoreInternalVariables"] \
1123             or selfA._toStore("CostFunctionJ") \
1124             or selfA._toStore("CostFunctionJb") \
1125             or selfA._toStore("CostFunctionJo") \
1126             or selfA._toStore("APosterioriCovariance") \
1127             or selfA._toStore("InnovationAtCurrentAnalysis") \
1128             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1129             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1130             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1131             _Innovation = Ynpu - _HXa
1132         #
1133         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1134         # ---> avec analysis
1135         selfA.StoredVariables["Analysis"].store( Xa )
1136         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1137             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1138         if selfA._toStore("InnovationAtCurrentAnalysis"):
1139             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1140         # ---> avec current state
1141         if selfA._parameters["StoreInternalVariables"] \
1142             or selfA._toStore("CurrentState"):
1143             selfA.StoredVariables["CurrentState"].store( Xn )
1144         if selfA._toStore("ForecastState"):
1145             selfA.StoredVariables["ForecastState"].store( EMX )
1146         if selfA._toStore("ForecastCovariance"):
1147             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
1148         if selfA._toStore("BMA"):
1149             selfA.StoredVariables["BMA"].store( EMX - Xa )
1150         if selfA._toStore("InnovationAtCurrentState"):
1151             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
1152         if selfA._toStore("SimulatedObservationAtCurrentState") \
1153             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1154             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1155         # ---> autres
1156         if selfA._parameters["StoreInternalVariables"] \
1157             or selfA._toStore("CostFunctionJ") \
1158             or selfA._toStore("CostFunctionJb") \
1159             or selfA._toStore("CostFunctionJo") \
1160             or selfA._toStore("CurrentOptimum") \
1161             or selfA._toStore("APosterioriCovariance"):
1162             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1163             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1164             J   = Jb + Jo
1165             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1166             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1167             selfA.StoredVariables["CostFunctionJ" ].store( J )
1168             #
1169             if selfA._toStore("IndexOfOptimum") \
1170                 or selfA._toStore("CurrentOptimum") \
1171                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1172                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1173                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1174                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1175                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1176             if selfA._toStore("IndexOfOptimum"):
1177                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1178             if selfA._toStore("CurrentOptimum"):
1179                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1180             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1181                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1182             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1183                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1184             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1185                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1186             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1187                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1188         if selfA._toStore("APosterioriCovariance"):
1189             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1190         if selfA._parameters["EstimationOf"] == "Parameters" \
1191             and J < previousJMinimum:
1192             previousJMinimum    = J
1193             XaMin               = Xa
1194             if selfA._toStore("APosterioriCovariance"):
1195                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1196         # ---> For the smoothers
1197         if selfA._toStore("CurrentEnsembleState"):
1198             selfA.StoredVariables["CurrentEnsembleState"].store( Xn )
1199     #
1200     # Additional final storage of the optimum for parameter estimation
1201     # ----------------------------------------------------------------------
1202     if selfA._parameters["EstimationOf"] == "Parameters":
1203         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1204         selfA.StoredVariables["Analysis"].store( XaMin )
1205         if selfA._toStore("APosterioriCovariance"):
1206             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1207         if selfA._toStore("BMA"):
1208             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1209     #
1210     return 0
1211
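# ==============================================================================
# Editor's illustrative sketch (not called anywhere in this module): the
# finite-size variants above minimize a cost function expressed directly in
# the space of the ensemble weights w, J(w) = Jo(w) + Jb(w), with
# scipy.optimize.fmin_cg. The self-contained function below reproduces that
# pattern on a synthetic problem, reusing the module-level numpy/scipy/math
# imports; every dimension and matrix here is an arbitrary assumption chosen
# only for illustration.
def _demoEnsembleWeightMinimization(__m = 5, __p = 3, seed = 123456):
    "Minimal sketch of the ensemble-space cost minimization used above"
    rng  = numpy.random.RandomState(seed)
    EaHX = rng.standard_normal((__p,__m)) # Anomalies of the observed ensemble (assumed)
    d    = rng.standard_normal((__p,1))   # Innovation vector (assumed)
    RI   = numpy.identity(__p)            # Inverse observation error covariance (assumed)
    def CostFunction(w):
        _A  = d - (EaHX @ w).reshape((__p,1))
        _Jo = 0.5 * _A.T @ (RI @ _A)
        _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
        return float(_Jo) + float(_Jb)
    def GradientOfCostFunction(w):
        _A  = d - (EaHX @ w).reshape((__p,1))
        _GradJo = - EaHX.T @ (RI @ _A)
        _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
        return numpy.ravel(_GradJo + _GradJb)
    vw = scipy.optimize.fmin_cg(
        f      = CostFunction,
        x0     = numpy.zeros(__m),
        fprime = GradientOfCostFunction,
        disp   = False,
        )
    return vw
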
1212 # ==============================================================================
1213 def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1214     """
1215     Extended Kalman Filter
1216     """
1217     if selfA._parameters["EstimationOf"] == "Parameters":
1218         selfA._parameters["StoreInternalVariables"] = True
1219     #
1220     # Operators
1221     H = HO["Direct"].appliedControledFormTo
1222     #
1223     if selfA._parameters["EstimationOf"] == "State":
1224         M = EM["Direct"].appliedControledFormTo
1225     #
1226     if CM is not None and "Tangent" in CM and U is not None:
1227         Cm = CM["Tangent"].asMatrix(Xb)
1228     else:
1229         Cm = None
1230     #
1231     # Observation duration and sizes
1232     if hasattr(Y,"stepnumber"):
1233         duration = Y.stepnumber()
1234         __p = numpy.cumprod(Y.shape())[-1]
1235     else:
1236         duration = 2
1237         __p = numpy.array(Y).size
1238     #
1239     # Precompute the inverses of B and R
1240     if selfA._parameters["StoreInternalVariables"] \
1241         or selfA._toStore("CostFunctionJ") \
1242         or selfA._toStore("CostFunctionJb") \
1243         or selfA._toStore("CostFunctionJo") \
1244         or selfA._toStore("CurrentOptimum") \
1245         or selfA._toStore("APosterioriCovariance"):
1246         BI = B.getI()
1247         RI = R.getI()
1248     #
1249     __n = Xb.size
1250     #
1251     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1252         Xn = Xb
1253         Pn = B
1254         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1255         selfA.StoredVariables["Analysis"].store( Xb )
1256         if selfA._toStore("APosterioriCovariance"):
1257             if hasattr(B,"asfullmatrix"):
1258                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1259             else:
1260                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1261         selfA._setInternalState("seed", numpy.random.get_state())
1262     elif selfA._parameters["nextStep"]:
1263         Xn = selfA._getInternalState("Xn")
1264         Pn = selfA._getInternalState("Pn")
1265     #
1266     if selfA._parameters["EstimationOf"] == "Parameters":
1267         XaMin            = Xn
1268         previousJMinimum = numpy.finfo(float).max
1269     #
    nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Required below by IndexOfOptimum
    #
1270     for step in range(duration-1):
1271         if hasattr(Y,"store"):
1272             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1273         else:
1274             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1275         #
1276         Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
1277         Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
1278         Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1279         Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
1280         #
1281         if selfA._parameters["EstimationOf"] == "State":
1282             Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
1283             Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
1284             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
1285             Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
1286         #
1287         if U is not None:
1288             if hasattr(U,"store") and len(U)>1:
1289                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1290             elif hasattr(U,"store") and len(U)==1:
1291                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1292             else:
1293                 Un = numpy.asmatrix(numpy.ravel( U )).T
1294         else:
1295             Un = None
1296         #
1297         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1298             Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, Un) ) )).T
1299             if Cm is not None and Un is not None: # Caution: if Cm is also included in M, it is applied twice!
1300                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1301                 Xn_predicted = Xn_predicted + Cm * Un
1302             Pn_predicted = Q + Mt * Pn * Ma
1303         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1304             # --- > By principle, M = Id, Q = 0
1305             Xn_predicted = Xn
1306             Pn_predicted = Pn
1307         #
1308         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
1309             Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
1310             Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
1311         #
1312         if selfA._parameters["EstimationOf"] == "State":
1313             HX_predicted = numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T
1314             _Innovation  = Ynpu - HX_predicted
1315         elif selfA._parameters["EstimationOf"] == "Parameters":
1316             HX_predicted = numpy.asmatrix(numpy.ravel( H( (Xn_predicted, Un) ) )).T
1317             _Innovation  = Ynpu - HX_predicted
1318             if Cm is not None and Un is not None: # Caution: if Cm is also included in H, it is applied twice!
1319                 _Innovation = _Innovation - Cm * Un
1320         #
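        # Standard (extended) Kalman analysis step:
        #   K   = P^f H^T (R + H P^f H^T)^-1
        #   x^a = x^f + K (y - H x^f)
        #   P^a = P^f - K H P^f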
1321         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
1322         Xn = Xn_predicted + Kn * _Innovation
1323         Pn = Pn_predicted - Kn * Ht * Pn_predicted
1324         #
1325         Xa = Xn # Pointeurs
1326         #--------------------------
1327         selfA._setInternalState("Xn", Xn)
1328         selfA._setInternalState("Pn", Pn)
1329         #--------------------------
1330         #
1331         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1332         # ---> avec analysis
1333         selfA.StoredVariables["Analysis"].store( Xa )
1334         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1335             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
1336         if selfA._toStore("InnovationAtCurrentAnalysis"):
1337             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1338         # ---> avec current state
1339         if selfA._parameters["StoreInternalVariables"] \
1340             or selfA._toStore("CurrentState"):
1341             selfA.StoredVariables["CurrentState"].store( Xn )
1342         if selfA._toStore("ForecastState"):
1343             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
1344         if selfA._toStore("ForecastCovariance"):
1345             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
1346         if selfA._toStore("BMA"):
1347             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
1348         if selfA._toStore("InnovationAtCurrentState"):
1349             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
1350         if selfA._toStore("SimulatedObservationAtCurrentState") \
1351             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1352             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
1353         # ---> autres
1354         if selfA._parameters["StoreInternalVariables"] \
1355             or selfA._toStore("CostFunctionJ") \
1356             or selfA._toStore("CostFunctionJb") \
1357             or selfA._toStore("CostFunctionJo") \
1358             or selfA._toStore("CurrentOptimum") \
1359             or selfA._toStore("APosterioriCovariance"):
1360             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1361             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1362             J   = Jb + Jo
1363             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1364             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1365             selfA.StoredVariables["CostFunctionJ" ].store( J )
1366             #
1367             if selfA._toStore("IndexOfOptimum") \
1368                 or selfA._toStore("CurrentOptimum") \
1369                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1370                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1371                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1372                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1373                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1374             if selfA._toStore("IndexOfOptimum"):
1375                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1376             if selfA._toStore("CurrentOptimum"):
1377                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1378             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1379                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1380             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1381                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1382             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1383                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1384             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1385                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1386         if selfA._toStore("APosterioriCovariance"):
1387             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
1388         if selfA._parameters["EstimationOf"] == "Parameters" \
1389             and J < previousJMinimum:
1390             previousJMinimum    = J
1391             XaMin               = Xa
1392             if selfA._toStore("APosterioriCovariance"):
1393                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1394     #
1395     # Additional final storage of the optimum for parameter estimation
1396     # ----------------------------------------------------------------------
1397     if selfA._parameters["EstimationOf"] == "Parameters":
1398         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1399         selfA.StoredVariables["Analysis"].store( XaMin )
1400         if selfA._toStore("APosterioriCovariance"):
1401             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1402         if selfA._toStore("BMA"):
1403             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1404     #
1405     return 0
1406
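# ==============================================================================
# Editor's illustrative sketch (not called anywhere in this module): one
# analysis step of the extended Kalman filter implemented in exkf above, on a
# toy linear problem. All matrices below are arbitrary assumptions chosen only
# to show the gain, state update and covariance update formulas; the module
# itself works with its own operator and covariance objects.
def _demoKalmanAnalysisStep():
    "Minimal sketch of one Kalman analysis update"
    Xf = numpy.array([[1.], [2.]])  # Forecast state (assumed)
    Pf = numpy.diag([1., 2.])       # Forecast error covariance (assumed)
    Ht = numpy.array([[1., 0.]])    # Linearized observation operator (assumed)
    R  = numpy.array([[0.5]])       # Observation error covariance (assumed)
    Y  = numpy.array([[1.2]])       # Observation (assumed)
    #
    Kn = Pf @ Ht.T @ numpy.linalg.inv(R + Ht @ Pf @ Ht.T) # Kalman gain
    Xa = Xf + Kn @ (Y - Ht @ Xf)                          # Analysis state
    Pa = Pf - Kn @ Ht @ Pf                                # Analysis error covariance
    return Xa, Pa
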
1407 # ==============================================================================
1408 def ienkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="IEnKF12",
1409     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1410     """
1411     Iterative EnKF
1412     """
1413     if selfA._parameters["EstimationOf"] == "Parameters":
1414         selfA._parameters["StoreInternalVariables"] = True
1415     #
1416     # Operators
1417     H = HO["Direct"].appliedControledFormTo
1418     #
1419     if selfA._parameters["EstimationOf"] == "State":
1420         M = EM["Direct"].appliedControledFormTo
1421     #
1422     if CM is not None and "Tangent" in CM and U is not None:
1423         Cm = CM["Tangent"].asMatrix(Xb)
1424     else:
1425         Cm = None
1426     #
1427     # Observation duration and sizes
1428     if hasattr(Y,"stepnumber"):
1429         duration = Y.stepnumber()
1430         __p = numpy.cumprod(Y.shape())[-1]
1431     else:
1432         duration = 2
1433         __p = numpy.array(Y).size
1434     #
1435     # Precompute the inverses of B and R
1436     if selfA._parameters["StoreInternalVariables"] \
1437         or selfA._toStore("CostFunctionJ") \
1438         or selfA._toStore("CostFunctionJb") \
1439         or selfA._toStore("CostFunctionJo") \
1440         or selfA._toStore("CurrentOptimum") \
1441         or selfA._toStore("APosterioriCovariance"):
1442         BI = B.getI()
1443     RI = R.getI()
1444     #
1445     __n = Xb.size
1446     __m = selfA._parameters["NumberOfMembers"]
1447     #
1448     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1449         if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
1450         else:                         Pn = B
1451         Xn = EnsembleOfBackgroundPerturbations( Xb, Pn, __m )
1452         selfA.StoredVariables["Analysis"].store( Xb )
1453         if selfA._toStore("APosterioriCovariance"):
1454             if hasattr(B,"asfullmatrix"):
1455                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1456             else:
1457                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1458         selfA._setInternalState("seed", numpy.random.get_state())
1459     elif selfA._parameters["nextStep"]:
1460         Xn = selfA._getInternalState("Xn")
1461     #
1462     previousJMinimum = numpy.finfo(float).max
    nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Required below by IndexOfOptimum
1463     #
1464     for step in range(duration-1):
1465         numpy.random.set_state(selfA._getInternalState("seed"))
1466         if hasattr(Y,"store"):
1467             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1468         else:
1469             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1470         #
1471         if U is not None:
1472             if hasattr(U,"store") and len(U)>1:
1473                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1474             elif hasattr(U,"store") and len(U)==1:
1475                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1476             else:
1477                 Un = numpy.asmatrix(numpy.ravel( U )).T
1478         else:
1479             Un = None
1480         #
1481         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1482             Xn = CovarianceInflation( Xn,
1483                 selfA._parameters["InflationType"],
1484                 selfA._parameters["InflationFactor"],
1485                 )
1486         #
1487         #--------------------------
1488         if VariantM == "IEnKF12":
1489             Xfm = numpy.ravel(Xn.mean(axis=1, dtype=mfp).astype('float'))
1490             EaX = EnsembleOfAnomalies( Xn ) / math.sqrt(__m-1)
1491             __j = 0
1492             Deltaw = 1
1493             if not BnotT:
1494                 Ta  = numpy.identity(__m)
1495             vw  = numpy.zeros(__m)
1496             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
1497                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
1498                 #
1499                 if BnotT:
1500                     E1 = vx1 + _epsilon * EaX
1501                 else:
1502                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
1503                 #
1504                 if selfA._parameters["EstimationOf"] == "State": # Forecast + Q
1505                     E2 = M( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
1506                         argsAsSerie = True,
1507                         returnSerieAsArrayMatrix = True )
1508                 elif selfA._parameters["EstimationOf"] == "Parameters":
1509                     # --- > By principle, M = Id
1510                     E2 = Xn
1511                 vx2 = E2.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1512                 vy1 = H((vx2, Un)).reshape((__p,1))
1513                 #
1514                 HE2 = H( [(E2[:,i,numpy.newaxis], Un) for i in range(__m)],
1515                     argsAsSerie = True,
1516                     returnSerieAsArrayMatrix = True )
1517                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
1518                 #
1519                 if BnotT:
1520                     EaY = (HE2 - vy2) / _epsilon
1521                 else:
1522                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
1523                 #
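                # Gauss-Newton step in ensemble-weight space: GradJ and the
                # approximate Hessian mH of J(w) are assembled from the
                # observation anomalies EaY, then the increment Deltaw solves
                # mH Deltaw = -GradJ.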
1524                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy1 )))
1525                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
1526                 Deltaw = - numpy.linalg.solve(mH,GradJ)
1527                 #
1528                 vw = vw + Deltaw
1529                 #
1530                 if not BnotT:
1531                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1532                 #
1533                 __j = __j + 1
1534             #
1535             A2 = EnsembleOfAnomalies( E2 )
1536             #
1537             if BnotT:
1538                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
1539                 A2 = math.sqrt(__m-1) * A2 @ Ta / _epsilon
1540             #
1541             Xn = vx2 + A2
1542         #--------------------------
1543         else:
1544             raise ValueError("VariantM has to be chosen in the authorized methods list.")
1545         #
1546         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
1547             Xn = CovarianceInflation( Xn,
1548                 selfA._parameters["InflationType"],
1549                 selfA._parameters["InflationFactor"],
1550                 )
1551         #
1552         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
1553         #--------------------------
1554         selfA._setInternalState("Xn", Xn)
1555         selfA._setInternalState("seed", numpy.random.get_state())
1556         #--------------------------
1557         #
1558         if selfA._parameters["StoreInternalVariables"] \
1559             or selfA._toStore("CostFunctionJ") \
1560             or selfA._toStore("CostFunctionJb") \
1561             or selfA._toStore("CostFunctionJo") \
1562             or selfA._toStore("APosterioriCovariance") \
1563             or selfA._toStore("InnovationAtCurrentAnalysis") \
1564             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
1565             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1566             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
1567             _Innovation = Ynpu - _HXa
1568         #
1569         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1570         # ---> avec analysis
1571         selfA.StoredVariables["Analysis"].store( Xa )
1572         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
1573             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
1574         if selfA._toStore("InnovationAtCurrentAnalysis"):
1575             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
1576         # ---> avec current state
1577         if selfA._parameters["StoreInternalVariables"] \
1578             or selfA._toStore("CurrentState"):
1579             selfA.StoredVariables["CurrentState"].store( Xn )
1580         if selfA._toStore("ForecastState"):
1581             selfA.StoredVariables["ForecastState"].store( E2 )
1582         if selfA._toStore("ForecastCovariance"):
1583             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(E2) )
1584         if selfA._toStore("BMA"):
1585             selfA.StoredVariables["BMA"].store( E2 - Xa )
1586         if selfA._toStore("InnovationAtCurrentState"):
1587             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
1588         if selfA._toStore("SimulatedObservationAtCurrentState") \
1589             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1590             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
1591         # ---> autres
1592         if selfA._parameters["StoreInternalVariables"] \
1593             or selfA._toStore("CostFunctionJ") \
1594             or selfA._toStore("CostFunctionJb") \
1595             or selfA._toStore("CostFunctionJo") \
1596             or selfA._toStore("CurrentOptimum") \
1597             or selfA._toStore("APosterioriCovariance"):
1598             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
1599             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
1600             J   = Jb + Jo
1601             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1602             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1603             selfA.StoredVariables["CostFunctionJ" ].store( J )
1604             #
1605             if selfA._toStore("IndexOfOptimum") \
1606                 or selfA._toStore("CurrentOptimum") \
1607                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
1608                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
1609                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
1610                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1611                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1612             if selfA._toStore("IndexOfOptimum"):
1613                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1614             if selfA._toStore("CurrentOptimum"):
1615                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
1616             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1617                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
1618             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1619                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1620             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1621                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1622             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1623                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1624         if selfA._toStore("APosterioriCovariance"):
1625             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
1626         if selfA._parameters["EstimationOf"] == "Parameters" \
1627             and J < previousJMinimum:
1628             previousJMinimum    = J
1629             XaMin               = Xa
1630             if selfA._toStore("APosterioriCovariance"):
1631                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
1632     #
1633     # Additional final storage of the optimum for parameter estimation
1634     # ----------------------------------------------------------------------
1635     if selfA._parameters["EstimationOf"] == "Parameters":
1636         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
1637         selfA.StoredVariables["Analysis"].store( XaMin )
1638         if selfA._toStore("APosterioriCovariance"):
1639             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
1640         if selfA._toStore("BMA"):
1641             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
1642     #
1643     return 0
1644
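# ==============================================================================
# Editor's illustrative sketch (not called anywhere in this module): the
# ensemble algorithms above rely on helpers such as EnsembleOfAnomalies and
# EnsembleErrorCovariance, defined elsewhere in this module. Assuming those
# helpers follow the usual definitions, the function below shows how the
# ensemble mean, the anomalies and the empirical error covariance are formed
# with plain numpy; the default synthetic ensemble is an arbitrary assumption.
def _demoEnsembleStatistics(Ensemble = None):
    "Minimal sketch of ensemble mean/anomalies/covariance"
    if Ensemble is None:
        Ensemble = numpy.random.RandomState(0).standard_normal((4,10)) # n=4 states, m=10 members (assumed)
    __n, __m   = Ensemble.shape
    Mean       = Ensemble.mean(axis=1).reshape((__n,1))
    Anomalies  = Ensemble - Mean                      # Deviation of each member from the mean
    Covariance = (Anomalies @ Anomalies.T) / (__m-1)  # Unbiased empirical error covariance
    return Mean, Anomalies, Covariance
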
1645 # ==============================================================================
1646 def incr3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
1647     """
1648     Incremental 3DVAR
1649     """
1650     #
1651     # Initializations
1652     # ---------------
1653     #
1654     # Non-linear operator for the outer loop
1655     Hm = HO["Direct"].appliedTo
1656     #
1657     # Precompute the inverses of B and R
1658     BI = B.getI()
1659     RI = R.getI()
1660     #
1661     # Starting point of the optimization
1662     Xini = selfA._parameters["InitializationPoint"]
1663     #
1664     HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
1665     Innovation = Y - HXb
1666     #
1667     # Outer Loop
1668     # ----------
1669     iOuter = 0
1670     J      = 1./mpr
1671     DeltaJ = 1./mpr
1672     Xr     = Xini.reshape((-1,1))
1673     while abs(DeltaJ) >= selfA._parameters["CostDecrementTolerance"] and iOuter <= selfA._parameters["MaximumNumberOfSteps"]:
1674         #
1675         # Inner Loop
1676         # ----------
1677         Ht = HO["Tangent"].asMatrix(Xr)
1678         Ht = Ht.reshape(Y.size,Xr.size) # ADAO & check shape
1679         #
1680         # Definition of the cost function
1681         # ------------------------------
1682         def CostFunction(dx):
1683             _dX  = numpy.asmatrix(numpy.ravel( dx )).T
1684             if selfA._parameters["StoreInternalVariables"] or \
1685                 selfA._toStore("CurrentState") or \
1686                 selfA._toStore("CurrentOptimum"):
1687                 selfA.StoredVariables["CurrentState"].store( Xb + _dX )
1688             _HdX = Ht * _dX
1689             _HdX = numpy.asmatrix(numpy.ravel( _HdX )).T
1690             _dInnovation = Innovation - _HdX
1691             if selfA._toStore("SimulatedObservationAtCurrentState") or \
1692                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1693                 selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb + _HdX )
1694             if selfA._toStore("InnovationAtCurrentState"):
1695                 selfA.StoredVariables["InnovationAtCurrentState"].store( _dInnovation )
1696             #
1697             Jb  = float( 0.5 * _dX.T * BI * _dX )
1698             Jo  = float( 0.5 * _dInnovation.T * RI * _dInnovation )
1699             J   = Jb + Jo
1700             #
1701             selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
1702             selfA.StoredVariables["CostFunctionJb"].store( Jb )
1703             selfA.StoredVariables["CostFunctionJo"].store( Jo )
1704             selfA.StoredVariables["CostFunctionJ" ].store( J )
1705             if selfA._toStore("IndexOfOptimum") or \
1706                 selfA._toStore("CurrentOptimum") or \
1707                 selfA._toStore("CostFunctionJAtCurrentOptimum") or \
1708                 selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
1709                 selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
1710                 selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1711                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1712             if selfA._toStore("IndexOfOptimum"):
1713                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
1714             if selfA._toStore("CurrentOptimum"):
1715                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
1716             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1717                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
1718             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
1719                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
1720             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
1721                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
1722             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
1723                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
1724             return J
1725         #
1726         def GradientOfCostFunction(dx):
1727             _dX          = numpy.asmatrix(numpy.ravel( dx )).T
1728             _HdX         = Ht * _dX
1729             _HdX         = numpy.asmatrix(numpy.ravel( _HdX )).T
1730             _dInnovation = Innovation - _HdX
1731             GradJb       = BI * _dX
1732             GradJo       = - Ht.T @ (RI * _dInnovation)
1733             GradJ        = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
1734             return GradJ
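        # The inner loop thus minimizes the quadratic incremental cost
        #   J(dx) = 0.5 dx^T B^-1 dx + 0.5 (d - H dx)^T R^-1 (d - H dx)
        # with d = Y - H(Xb) and H the tangent linear operator evaluated at the
        # current outer iterate Xr; its gradient is B^-1 dx - H^T R^-1 (d - H dx).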
1735         #
1736         # Minimization of the cost functional
1737         # --------------------------------
1738         nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
1739         #
1740         if selfA._parameters["Minimizer"] == "LBFGSB":
1741             # Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
1742             if "0.19" <= scipy.version.version <= "1.1.0":
1743                 import lbfgsbhlt as optimiseur
1744             else:
1745                 import scipy.optimize as optimiseur
1746             Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
1747                 func        = CostFunction,
1748                 x0          = numpy.zeros(Xini.size),
1749                 fprime      = GradientOfCostFunction,
1750                 args        = (),
1751                 bounds      = selfA._parameters["Bounds"],
1752                 maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
1753                 factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
1754                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1755                 iprint      = selfA._parameters["optiprint"],
1756                 )
1757             nfeval = Informations['funcalls']
1758             rc     = Informations['warnflag']
1759         elif selfA._parameters["Minimizer"] == "TNC":
1760             Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
1761                 func        = CostFunction,
1762                 x0          = numpy.zeros(Xini.size),
1763                 fprime      = GradientOfCostFunction,
1764                 args        = (),
1765                 bounds      = selfA._parameters["Bounds"],
1766                 maxfun      = selfA._parameters["MaximumNumberOfSteps"],
1767                 pgtol       = selfA._parameters["ProjectedGradientTolerance"],
1768                 ftol        = selfA._parameters["CostDecrementTolerance"],
1769                 messages    = selfA._parameters["optmessages"],
1770                 )
1771         elif selfA._parameters["Minimizer"] == "CG":
1772             Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
1773                 f           = CostFunction,
1774                 x0          = numpy.zeros(Xini.size),
1775                 fprime      = GradientOfCostFunction,
1776                 args        = (),
1777                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
1778                 gtol        = selfA._parameters["GradientNormTolerance"],
1779                 disp        = selfA._parameters["optdisp"],
1780                 full_output = True,
1781                 )
1782         elif selfA._parameters["Minimizer"] == "NCG":
1783             Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
1784                 f           = CostFunction,
1785                 x0          = numpy.zeros(Xini.size),
1786                 fprime      = GradientOfCostFunction,
1787                 args        = (),
1788                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
1789                 avextol     = selfA._parameters["CostDecrementTolerance"],
1790                 disp        = selfA._parameters["optdisp"],
1791                 full_output = True,
1792                 )
1793         elif selfA._parameters["Minimizer"] == "BFGS":
1794             Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
1795                 f           = CostFunction,
1796                 x0          = numpy.zeros(Xini.size),
1797                 fprime      = GradientOfCostFunction,
1798                 args        = (),
1799                 maxiter     = selfA._parameters["MaximumNumberOfSteps"],
1800                 gtol        = selfA._parameters["GradientNormTolerance"],
1801                 disp        = selfA._parameters["optdisp"],
1802                 full_output = True,
1803                 )
1804         else:
1805             raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
1806         #
1807         IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
1808         MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
1809         #
1810         if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
1811             Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
1812             Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
1813         else:
1814             Minimum = Xb + numpy.asmatrix(numpy.ravel( Minimum )).T
1815         #
1816         Xr     = Minimum
1817         DeltaJ = selfA.StoredVariables["CostFunctionJ" ][-1] - J
        J      = selfA.StoredVariables["CostFunctionJ" ][-1] # Update the reference cost for the outer-loop convergence test
1818         iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
1819     #
1820     # Retrieval of the analysis
1821     # ----------------------
1822     Xa = Xr
1823     #
1824     selfA.StoredVariables["Analysis"].store( Xa )
1825     #
1826     if selfA._toStore("OMA") or \
1827         selfA._toStore("SigmaObs2") or \
1828         selfA._toStore("SimulationQuantiles") or \
1829         selfA._toStore("SimulatedObservationAtOptimum"):
1830         if selfA._toStore("SimulatedObservationAtCurrentState"):
1831             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
1832         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
1833             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
1834         else:
1835             HXa = Hm( Xa )
1836     #
1837     # Computation of the analysis covariance
1838     # ---------------------------------
1839     if selfA._toStore("APosterioriCovariance") or \
1840         selfA._toStore("SimulationQuantiles") or \
1841         selfA._toStore("JacobianMatrixAtOptimum") or \
1842         selfA._toStore("KalmanGainAtOptimum"):
1843         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
1844         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
1845     if selfA._toStore("APosterioriCovariance") or \
1846         selfA._toStore("SimulationQuantiles") or \
1847         selfA._toStore("KalmanGainAtOptimum"):
1848         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
1849         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
1850     if selfA._toStore("APosterioriCovariance") or \
1851         selfA._toStore("SimulationQuantiles"):
1852         HessienneI = []
1853         nb = Xa.size
1854         for i in range(nb):
1855             _ee    = numpy.matrix(numpy.zeros(nb)).T
1856             _ee[i] = 1.
1857             _HtEE  = numpy.dot(HtM,_ee)
1858             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
1859             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
1860         HessienneI = numpy.matrix( HessienneI )
1861         A = HessienneI.I
1862         if min(A.shape) != max(A.shape):
1863             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
1864         if (numpy.diag(A) < 0).any():
1865             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
1866         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
1867             try:
1868                 L = numpy.linalg.cholesky( A )
1869             except:
1870                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
1871     if selfA._toStore("APosterioriCovariance"):
1872         selfA.StoredVariables["APosterioriCovariance"].store( A )
1873     if selfA._toStore("JacobianMatrixAtOptimum"):
1874         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
1875     if selfA._toStore("KalmanGainAtOptimum"):
1876         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
1877         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
1878         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
1879     #
1880     # Additional computations and/or storage
1881     # ---------------------------------------
1882     if selfA._toStore("Innovation") or \
1883         selfA._toStore("SigmaObs2") or \
1884         selfA._toStore("MahalanobisConsistency") or \
1885         selfA._toStore("OMB"):
1886         d  = Y - HXb
1887     if selfA._toStore("Innovation"):
1888         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
1889     if selfA._toStore("BMA"):
1890         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
1891     if selfA._toStore("OMA"):
1892         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
1893     if selfA._toStore("OMB"):
1894         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
1895     if selfA._toStore("SigmaObs2"):
1896         TraceR = R.trace(Y.size)
1897         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
1898     if selfA._toStore("MahalanobisConsistency"):
1899         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
1900     if selfA._toStore("SimulationQuantiles"):
1901         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
1902     if selfA._toStore("SimulatedObservationAtBackground"):
1903         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
1904     if selfA._toStore("SimulatedObservationAtOptimum"):
1905         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
1906     #
1907     return 0
1908
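# ==============================================================================
# Editor's illustrative sketch (not called anywhere in this module): the inner
# loop of incr3dvar above hands a cost function and its gradient to a
# quasi-Newton minimizer. The self-contained function below reproduces that
# call pattern with scipy.optimize.fmin_l_bfgs_b on a toy quadratic cost; the
# matrix BI, the target xt and the size are arbitrary assumptions.
def _demoQuadraticMinimization(__n = 3):
    "Minimal sketch of a quadratic cost minimized with fmin_l_bfgs_b"
    BI = 2.0 * numpy.identity(__n)  # Assumed inverse background error covariance
    xt = numpy.arange(1., __n+1.)   # Assumed target increment (the expected minimum)
    def CostFunction(dx):
        _e = dx - xt
        return float( 0.5 * _e.T @ (BI @ _e) )
    def GradientOfCostFunction(dx):
        return numpy.ravel( BI @ (dx - xt) )
    Minimum, J_optimal, Informations = scipy.optimize.fmin_l_bfgs_b(
        func   = CostFunction,
        x0     = numpy.zeros(__n),
        fprime = GradientOfCostFunction,
        )
    return Minimum
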
1909 # ==============================================================================
1910 def mlef(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="MLEF13",
1911     BnotT=False, _epsilon=1.e-3, _e=1.e-7, _jmax=15000):
1912     """
1913     Maximum Likelihood Ensemble Filter
1914     """
1915     if selfA._parameters["EstimationOf"] == "Parameters":
1916         selfA._parameters["StoreInternalVariables"] = True
1917     #
1918     # Operators
1919     H = HO["Direct"].appliedControledFormTo
1920     #
1921     if selfA._parameters["EstimationOf"] == "State":
1922         M = EM["Direct"].appliedControledFormTo
1923     #
1924     if CM is not None and "Tangent" in CM and U is not None:
1925         Cm = CM["Tangent"].asMatrix(Xb)
1926     else:
1927         Cm = None
1928     #
1929     # Observation duration and sizes
1930     if hasattr(Y,"stepnumber"):
1931         duration = Y.stepnumber()
1932         __p = numpy.cumprod(Y.shape())[-1]
1933     else:
1934         duration = 2
1935         __p = numpy.array(Y).size
1936     #
1937     # Precompute the inverses of B and R
1938     if selfA._parameters["StoreInternalVariables"] \
1939         or selfA._toStore("CostFunctionJ") \
1940         or selfA._toStore("CostFunctionJb") \
1941         or selfA._toStore("CostFunctionJo") \
1942         or selfA._toStore("CurrentOptimum") \
1943         or selfA._toStore("APosterioriCovariance"):
1944         BI = B.getI()
1945     RI = R.getI()
1946     #
1947     __n = Xb.size
1948     __m = selfA._parameters["NumberOfMembers"]
1949     #
1950     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
1951         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
1952         selfA.StoredVariables["Analysis"].store( Xb )
1953         if selfA._toStore("APosterioriCovariance"):
1954             if hasattr(B,"asfullmatrix"):
1955                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
1956             else:
1957                 selfA.StoredVariables["APosterioriCovariance"].store( B )
1958         selfA._setInternalState("seed", numpy.random.get_state())
1959     elif selfA._parameters["nextStep"]:
1960         Xn = selfA._getInternalState("Xn")
1961     #
1962     previousJMinimum = numpy.finfo(float).max
    nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber() # Required below by IndexOfOptimum
1963     #
1964     for step in range(duration-1):
1965         numpy.random.set_state(selfA._getInternalState("seed"))
1966         if hasattr(Y,"store"):
1967             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
1968         else:
1969             Ynpu = numpy.ravel( Y ).reshape((__p,1))
1970         #
1971         if U is not None:
1972             if hasattr(U,"store") and len(U)>1:
1973                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
1974             elif hasattr(U,"store") and len(U)==1:
1975                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
1976             else:
1977                 Un = numpy.asmatrix(numpy.ravel( U )).T
1978         else:
1979             Un = None
1980         #
1981         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
1982             Xn = CovarianceInflation( Xn,
1983                 selfA._parameters["InflationType"],
1984                 selfA._parameters["InflationFactor"],
1985                 )
1986         #
1987         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
1988             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
1989                 argsAsSerie = True,
1990                 returnSerieAsArrayMatrix = True )
1991             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
1992             if Cm is not None and Un is not None: # Caution: if Cm is also included in M, it is applied twice!
1993                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
1994                 Xn_predicted = Xn_predicted + Cm * Un
1995         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
1996             # --- > By principle, M = Id, Q = 0
1997             Xn_predicted = EMX = Xn
1998         #
1999         #--------------------------
2000         if VariantM == "MLEF13":
2001             Xfm = numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))
2002             EaX = EnsembleOfAnomalies( Xn_predicted, Xfm, 1./math.sqrt(__m-1) )
2003             Ua  = numpy.identity(__m)
2004             __j = 0
2005             Deltaw = 1
2006             if not BnotT:
2007                 Ta  = numpy.identity(__m)
2008             vw  = numpy.zeros(__m)
2009             while numpy.linalg.norm(Deltaw) >= _e and __j <= _jmax:
2010                 vx1 = (Xfm + EaX @ vw).reshape((__n,1))
2011                 #
2012                 if BnotT:
2013                     E1 = vx1 + _epsilon * EaX
2014                 else:
2015                     E1 = vx1 + math.sqrt(__m-1) * EaX @ Ta
2016                 #
2017                 HE2 = H( [(E1[:,i,numpy.newaxis], Un) for i in range(__m)],
2018                     argsAsSerie = True,
2019                     returnSerieAsArrayMatrix = True )
2020                 vy2 = HE2.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2021                 #
2022                 if BnotT:
2023                     EaY = (HE2 - vy2) / _epsilon
2024                 else:
2025                     EaY = ( (HE2 - vy2) @ numpy.linalg.inv(Ta) ) / math.sqrt(__m-1)
2026                 #
2027                 GradJ = numpy.ravel(vw[:,None] - EaY.transpose() @ (RI * ( Ynpu - vy2 )))
2028                 mH = numpy.identity(__m) + EaY.transpose() @ (RI * EaY).reshape((-1,__m))
2029                 Deltaw = - numpy.linalg.solve(mH,GradJ)
2030                 #
2031                 vw = vw + Deltaw
2032                 #
2033                 if not BnotT:
2034                     Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2035                 #
2036                 __j = __j + 1
2037             #
2038             if BnotT:
2039                 Ta = numpy.real(scipy.linalg.sqrtm(numpy.linalg.inv( mH )))
2040             #
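            # The analysis ensemble is rebuilt around the last iterate vx1 from the
            # forecast anomalies EaX, the transform matrix Ta and the rotation Ua
            # (here the identity), in the manner of a deterministic ensemble transform.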
2041             Xn = vx1 + math.sqrt(__m-1) * EaX @ Ta @ Ua
2042         #--------------------------
2043         else:
2044             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2045         #
2046         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2047             Xn = CovarianceInflation( Xn,
2048                 selfA._parameters["InflationType"],
2049                 selfA._parameters["InflationFactor"],
2050                 )
2051         #
2052         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2053         #--------------------------
2054         selfA._setInternalState("Xn", Xn)
2055         selfA._setInternalState("seed", numpy.random.get_state())
2056         #--------------------------
2057         #
2058         if selfA._parameters["StoreInternalVariables"] \
2059             or selfA._toStore("CostFunctionJ") \
2060             or selfA._toStore("CostFunctionJb") \
2061             or selfA._toStore("CostFunctionJo") \
2062             or selfA._toStore("APosterioriCovariance") \
2063             or selfA._toStore("InnovationAtCurrentAnalysis") \
2064             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2065             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2066             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2067             _Innovation = Ynpu - _HXa
2068         #
2069         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2070         # ---> avec analysis
2071         selfA.StoredVariables["Analysis"].store( Xa )
2072         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2073             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2074         if selfA._toStore("InnovationAtCurrentAnalysis"):
2075             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2076         # ---> avec current state
2077         if selfA._parameters["StoreInternalVariables"] \
2078             or selfA._toStore("CurrentState"):
2079             selfA.StoredVariables["CurrentState"].store( Xn )
2080         if selfA._toStore("ForecastState"):
2081             selfA.StoredVariables["ForecastState"].store( EMX )
2082         if selfA._toStore("ForecastCovariance"):
2083             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2084         if selfA._toStore("BMA"):
2085             selfA.StoredVariables["BMA"].store( EMX - Xa )
2086         if selfA._toStore("InnovationAtCurrentState"):
2087             selfA.StoredVariables["InnovationAtCurrentState"].store( - HE2 + Ynpu )
2088         if selfA._toStore("SimulatedObservationAtCurrentState") \
2089             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2090             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HE2 )
2091         # ---> autres
2092         if selfA._parameters["StoreInternalVariables"] \
2093             or selfA._toStore("CostFunctionJ") \
2094             or selfA._toStore("CostFunctionJb") \
2095             or selfA._toStore("CostFunctionJo") \
2096             or selfA._toStore("CurrentOptimum") \
2097             or selfA._toStore("APosterioriCovariance"):
2098             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2099             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2100             J   = Jb + Jo
2101             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2102             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2103             selfA.StoredVariables["CostFunctionJ" ].store( J )
2104             #
2105             if selfA._toStore("IndexOfOptimum") \
2106                 or selfA._toStore("CurrentOptimum") \
2107                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2108                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2109                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2110                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2111                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2112             if selfA._toStore("IndexOfOptimum"):
2113                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2114             if selfA._toStore("CurrentOptimum"):
2115                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2116             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2117                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2118             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2119                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2120             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2121                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2122             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2123                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2124         if selfA._toStore("APosterioriCovariance"):
2125             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2126         if selfA._parameters["EstimationOf"] == "Parameters" \
2127             and J < previousJMinimum:
2128             previousJMinimum    = J
2129             XaMin               = Xa
2130             if selfA._toStore("APosterioriCovariance"):
2131                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2132     #
2133     # Additional final storage of the optimum for parameter estimation
2134     # ----------------------------------------------------------------------
2135     if selfA._parameters["EstimationOf"] == "Parameters":
2136         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2137         selfA.StoredVariables["Analysis"].store( XaMin )
2138         if selfA._toStore("APosterioriCovariance"):
2139             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2140         if selfA._toStore("BMA"):
2141             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2142     #
2143     return 0
2144
2145 # ==============================================================================
2146 def mmqr(
2147         func     = None,
2148         x0       = None,
2149         fprime   = None,
2150         bounds   = None,
2151         quantile = 0.5,
2152         maxfun   = 15000,
2153         toler    = 1.e-06,
2154         y        = None,
2155         ):
2156     """
2157     Computer implementation of the MMQR algorithm, based on the publication:
2158     David R. Hunter, Kenneth Lange, "Quantile Regression via an MM Algorithm",
2159     Journal of Computational and Graphical Statistics, 9, 1, pp.60-77, 2000.
2160     """
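    # Note (added for clarity): the MM ("Majorize-Minimize") approach replaces the
    # non-differentiable quantile loss by a smooth quadratic surrogate majorizing it
    # at the current iterate; each iteration then reduces to a weighted least-squares
    # step, which guarantees a monotone decrease of the original criterion.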
2161     #
2162     # Retrieval of the initial data and information
2163     # --------------------------------------------------
2164     variables = numpy.ravel( x0 )
2165     mesures   = numpy.ravel( y )
2166     increment = sys.float_info.max
2167     p         = variables.size
2168     n         = mesures.size
2169     quantile  = float(quantile)
2170     #
2171     # Computation of the MM parameters
2172     # ---------------------------
2173     tn      = float(toler) / n
2174     e0      = -tn / math.log(tn)
2175     epsilon = (e0-tn)/(1+math.log(e0))
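    # Note (added): following the reference above, tn, e0 and epsilon smooth the
    # absolute value used in the surrogate weights; they are presumably chosen so
    # that the total perturbation of the criterion remains of the order of "toler".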
2176     #
2177     # Initialization computations
2178     # ------------------------
2179     residus  = mesures - numpy.ravel( func( variables ) )
2180     poids    = 1./(epsilon+numpy.abs(residus))
2181     veps     = 1. - 2. * quantile - residus * poids
2182     lastsurrogate = -numpy.sum(residus*veps) - (1.-2.*quantile)*numpy.sum(residus)
2183     iteration = 0
2184     #
2185     # Iterative search
2186     # -------------------
2187     while (increment > toler) and (iteration < maxfun) :
2188         iteration += 1
2189         #
2190         Derivees  = numpy.array(fprime(variables))
2191         Derivees  = Derivees.reshape(n,p) # Needed to restore the matrix shape if it goes through YACS pipes
2192         DeriveesT = Derivees.transpose()
2193         M         =   numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) )
2194         SM        =   numpy.transpose(numpy.dot( DeriveesT , veps ))
2195         step      = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0]
2196         #
2197         variables = variables + step
2198         if bounds is not None:
2199             # Warning: avoid an infinite loop if an interval is too small
2200             while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ):
2201                 step      = step/2.
2202                 variables = variables - step
2203         residus   = mesures - numpy.ravel( func(variables) )
2204         surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2205         #
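        # Note (added): backtracking by step halving until the surrogate criterion
        # decreases again, preserving the monotone descent property of the MM scheme.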
2206         while ( (surrogate > lastsurrogate) and ( max(list(numpy.abs(step))) > 1.e-16 ) ) :
2207             step      = step/2.
2208             variables = variables - step
2209             residus   = mesures - numpy.ravel( func(variables) )
2210             surrogate = numpy.sum(residus**2 * poids) + (4.*quantile-2.) * numpy.sum(residus)
2211         #
2212         increment     = lastsurrogate-surrogate
2213         poids         = 1./(epsilon+numpy.abs(residus))
2214         veps          = 1. - 2. * quantile - residus * poids
2215         lastsurrogate = -numpy.sum(residus * veps) - (1.-2.*quantile)*numpy.sum(residus)
2216     #
2217     # Discrepancy measure
2218     # --------------
2219     Ecart = quantile * numpy.sum(residus) - numpy.sum( residus[residus<0] )
2220     #
2221     return variables, Ecart, [n,p,iteration,increment,0]
2222
2223 # ==============================================================================
2224 def multi3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
2225     """
2226     Multi-step and multi-method 3DVAR
2227     """
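    # Note (added): "oneCycle" is expected to be one of the elementary 3DVAR cycles
    # of this module; at each observation step the state is propagated by the
    # evolution model, then assimilated by this elementary cycle.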
2228     #
2229     # Initialization
2230     if selfA._parameters["EstimationOf"] == "State":
2231         M = EM["Direct"].appliedTo
2232         #
2233         if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2234             Xn = numpy.ravel(Xb).reshape((-1,1))
2235             selfA.StoredVariables["Analysis"].store( Xn )
2236             if selfA._toStore("APosterioriCovariance"):
2237                 if hasattr(B,"asfullmatrix"):
2238                     selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
2239                 else:
2240                     selfA.StoredVariables["APosterioriCovariance"].store( B )
2241             if selfA._toStore("ForecastState"):
2242                 selfA.StoredVariables["ForecastState"].store( Xn )
2243         elif selfA._parameters["nextStep"]:
2244             Xn = selfA._getInternalState("Xn")
2245     else:
2246         Xn = numpy.ravel(Xb).reshape((-1,1))
2247     #
2248     if hasattr(Y,"stepnumber"):
2249         duration = Y.stepnumber()
2250     else:
2251         duration = 2
2252     #
2253     # Multi-step
2254     for step in range(duration-1):
2255         if hasattr(Y,"store"):
2256             Ynpu = numpy.ravel( Y[step+1] ).reshape((-1,1))
2257         else:
2258             Ynpu = numpy.ravel( Y ).reshape((-1,1))
2259         #
2260         if selfA._parameters["EstimationOf"] == "State": # Forecast
2261             Xn_predicted = M( Xn )
2262             if selfA._toStore("ForecastState"):
2263                 selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2264         elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
2265             # ---> By principle, M = Id, Q = 0
2266             Xn_predicted = Xn
2267         Xn_predicted = numpy.ravel(Xn_predicted).reshape((-1,1))
2268         #
2269         oneCycle(selfA, Xn_predicted, Ynpu, U, HO, None, None, R, B, None)
2270         #
2271         Xn = selfA.StoredVariables["Analysis"][-1]
2272         #--------------------------
2273         selfA._setInternalState("Xn", Xn)
2274     #
2275     return 0
2276
2277 # ==============================================================================
2278 def psas3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2279     """
2280     3DVAR PSAS
2281     """
2282     #
2283     # Initializations
2284     # ---------------
2285     #
2286     # Operators
2287     Hm = HO["Direct"].appliedTo
2288     #
2289     # Optional use of a precomputed H(Xb) vector
2290     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2291         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2292     else:
2293         HXb = Hm( Xb )
2294     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2295     if Y.size != HXb.size:
2296         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
2297     if max(Y.shape) != max(HXb.shape):
2298         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
2299     #
2300     if selfA._toStore("JacobianMatrixAtBackground"):
2301         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2302         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2303         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2304     #
2305     Ht = HO["Tangent"].asMatrix(Xb)
2306     BHT = B * Ht.T
2307     HBHTpR = R + Ht * BHT
2308     Innovation = Y - HXb
2309     #
2310     # Starting point of the optimization
2311     Xini = numpy.zeros(Xb.shape)
2312     #
2313     # Definition of the cost function
2314     # ------------------------------
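    # Note (added): PSAS works in the dual (observation) space: the control variable
    # w has the size of the observations, the cost function below reads
    #   J(w) = 1/2 w' (H B H' + R) w - w' (Y - H(Xb)),
    # and the analysis is recovered in state space as Xa = Xb + B H' w.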
2315     def CostFunction(w):
2316         _W = numpy.asmatrix(numpy.ravel( w )).T
2317         if selfA._parameters["StoreInternalVariables"] or \
2318             selfA._toStore("CurrentState") or \
2319             selfA._toStore("CurrentOptimum"):
2320             selfA.StoredVariables["CurrentState"].store( Xb + BHT * _W )
2321         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2322             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2323             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm( Xb + BHT * _W ) )
2324         if selfA._toStore("InnovationAtCurrentState"):
2325             selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
2326         #
2327         Jb  = float( 0.5 * _W.T * HBHTpR * _W )
2328         Jo  = float( - _W.T * Innovation )
2329         J   = Jb + Jo
2330         #
2331         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
2332         selfA.StoredVariables["CostFunctionJb"].store( Jb )
2333         selfA.StoredVariables["CostFunctionJo"].store( Jo )
2334         selfA.StoredVariables["CostFunctionJ" ].store( J )
2335         if selfA._toStore("IndexOfOptimum") or \
2336             selfA._toStore("CurrentOptimum") or \
2337             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
2338             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
2339             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
2340             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2341             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2342         if selfA._toStore("IndexOfOptimum"):
2343             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2344         if selfA._toStore("CurrentOptimum"):
2345             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
2346         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2347             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
2348         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2349             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2350         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2351             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2352         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2353             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2354         return J
2355     #
2356     def GradientOfCostFunction(w):
2357         _W = numpy.asmatrix(numpy.ravel( w )).T
2358         GradJb  = HBHTpR * _W
2359         GradJo  = - Innovation
2360         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
2361         return GradJ
2362     #
2363     # Minimization of the functional
2364     # --------------------------------
2365     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
2366     #
2367     if selfA._parameters["Minimizer"] == "LBFGSB":
2368         if "0.19" <= scipy.version.version <= "1.1.0":
2369             import lbfgsbhlt as optimiseur
2370         else:
2371             import scipy.optimize as optimiseur
2372         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
2373             func        = CostFunction,
2374             x0          = Xini,
2375             fprime      = GradientOfCostFunction,
2376             args        = (),
2377             bounds      = selfA._parameters["Bounds"],
2378             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
2379             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
2380             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2381             iprint      = selfA._parameters["optiprint"],
2382             )
2383         nfeval = Informations['funcalls']
2384         rc     = Informations['warnflag']
2385     elif selfA._parameters["Minimizer"] == "TNC":
2386         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
2387             func        = CostFunction,
2388             x0          = Xini,
2389             fprime      = GradientOfCostFunction,
2390             args        = (),
2391             bounds      = selfA._parameters["Bounds"],
2392             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
2393             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
2394             ftol        = selfA._parameters["CostDecrementTolerance"],
2395             messages    = selfA._parameters["optmessages"],
2396             )
2397     elif selfA._parameters["Minimizer"] == "CG":
2398         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
2399             f           = CostFunction,
2400             x0          = Xini,
2401             fprime      = GradientOfCostFunction,
2402             args        = (),
2403             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2404             gtol        = selfA._parameters["GradientNormTolerance"],
2405             disp        = selfA._parameters["optdisp"],
2406             full_output = True,
2407             )
2408     elif selfA._parameters["Minimizer"] == "NCG":
2409         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
2410             f           = CostFunction,
2411             x0          = Xini,
2412             fprime      = GradientOfCostFunction,
2413             args        = (),
2414             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2415             avextol     = selfA._parameters["CostDecrementTolerance"],
2416             disp        = selfA._parameters["optdisp"],
2417             full_output = True,
2418             )
2419     elif selfA._parameters["Minimizer"] == "BFGS":
2420         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
2421             f           = CostFunction,
2422             x0          = Xini,
2423             fprime      = GradientOfCostFunction,
2424             args        = (),
2425             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
2426             gtol        = selfA._parameters["GradientNormTolerance"],
2427             disp        = selfA._parameters["optdisp"],
2428             full_output = True,
2429             )
2430     else:
2431         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
2432     #
2433     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2434     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
2435     #
2436     # Correction to work around a TNC bug on the returned Minimum
2437     # ----------------------------------------------------------------
2438     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
2439         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
2440         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
2441     else:
2442         Minimum = Xb + BHT * numpy.asmatrix(numpy.ravel( Minimum )).T
2443     #
2444     # Retrieval of the analysis
2445     # ----------------------
2446     Xa = Minimum
2447     #
2448     selfA.StoredVariables["Analysis"].store( Xa )
2449     #
2450     if selfA._toStore("OMA") or \
2451         selfA._toStore("SigmaObs2") or \
2452         selfA._toStore("SimulationQuantiles") or \
2453         selfA._toStore("SimulatedObservationAtOptimum"):
2454         if selfA._toStore("SimulatedObservationAtCurrentState"):
2455             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
2456         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2457             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
2458         else:
2459             HXa = Hm( Xa )
2460     #
2461     # Computation of the analysis covariance
2462     # ---------------------------------
2463     if selfA._toStore("APosterioriCovariance") or \
2464         selfA._toStore("SimulationQuantiles") or \
2465         selfA._toStore("JacobianMatrixAtOptimum") or \
2466         selfA._toStore("KalmanGainAtOptimum"):
2467         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
2468         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
2469     if selfA._toStore("APosterioriCovariance") or \
2470         selfA._toStore("SimulationQuantiles") or \
2471         selfA._toStore("KalmanGainAtOptimum"):
2472         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
2473         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
2474     if selfA._toStore("APosterioriCovariance") or \
2475         selfA._toStore("SimulationQuantiles"):
2476         BI = B.getI()
2477         RI = R.getI()
2478         HessienneI = []
2479         nb = Xa.size
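        # Note (added): the Hessian of the 3DVAR cost function, B^{-1} + H' R^{-1} H,
        # is assembled column by column by applying it to the canonical basis vectors;
        # the a posteriori covariance A is then obtained as its inverse.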
2480         for i in range(nb):
2481             _ee    = numpy.matrix(numpy.zeros(nb)).T
2482             _ee[i] = 1.
2483             _HtEE  = numpy.dot(HtM,_ee)
2484             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
2485             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
2486         HessienneI = numpy.matrix( HessienneI )
2487         A = HessienneI.I
2488         if min(A.shape) != max(A.shape):
2489             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
2490         if (numpy.diag(A) < 0).any():
2491             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
2492         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
2493             try:
2494                 L = numpy.linalg.cholesky( A )
2495             except:
2496                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
2497     if selfA._toStore("APosterioriCovariance"):
2498         selfA.StoredVariables["APosterioriCovariance"].store( A )
2499     if selfA._toStore("JacobianMatrixAtOptimum"):
2500         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
2501     if selfA._toStore("KalmanGainAtOptimum"):
2502         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
2503         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
2504         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
2505     #
2506     # Additional computations and/or storage
2507     # ---------------------------------------
2508     if selfA._toStore("Innovation") or \
2509         selfA._toStore("SigmaObs2") or \
2510         selfA._toStore("MahalanobisConsistency") or \
2511         selfA._toStore("OMB"):
2512         d  = Y - HXb
2513     if selfA._toStore("Innovation"):
2514         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
2515     if selfA._toStore("BMA"):
2516         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
2517     if selfA._toStore("OMA"):
2518         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
2519     if selfA._toStore("OMB"):
2520         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
2521     if selfA._toStore("SigmaObs2"):
2522         TraceR = R.trace(Y.size)
2523         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
2524     if selfA._toStore("MahalanobisConsistency"):
2525         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
2526     if selfA._toStore("SimulationQuantiles"):
2527         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
2528     if selfA._toStore("SimulatedObservationAtBackground"):
2529         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
2530     if selfA._toStore("SimulatedObservationAtOptimum"):
2531         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
2532     #
2533     return 0
2534
2535 # ==============================================================================
2536 def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"):
2537     """
2538     Stochastic EnKF
2539     """
2540     if selfA._parameters["EstimationOf"] == "Parameters":
2541         selfA._parameters["StoreInternalVariables"] = True
2542     #
2543     # Operators
2544     H = HO["Direct"].appliedControledFormTo
2545     #
2546     if selfA._parameters["EstimationOf"] == "State":
2547         M = EM["Direct"].appliedControledFormTo
2548     #
2549     if CM is not None and "Tangent" in CM and U is not None:
2550         Cm = CM["Tangent"].asMatrix(Xb)
2551     else:
2552         Cm = None
2553     #
2554     # Observation duration and sizes
2555     if hasattr(Y,"stepnumber"):
2556         duration = Y.stepnumber()
2557         __p = numpy.cumprod(Y.shape())[-1]
2558     else:
2559         duration = 2
2560         __p = numpy.array(Y).size
2561     #
2562     # Precompute the inverses of B and R
2563     if selfA._parameters["StoreInternalVariables"] \
2564         or selfA._toStore("CostFunctionJ") \
2565         or selfA._toStore("CostFunctionJb") \
2566         or selfA._toStore("CostFunctionJo") \
2567         or selfA._toStore("CurrentOptimum") \
2568         or selfA._toStore("APosterioriCovariance"):
2569         BI = B.getI()
2570         RI = R.getI()
2571     #
2572     __n = Xb.size
2573     __m = selfA._parameters["NumberOfMembers"]
2574     #
2575     if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
2576     else:                         Rn = R
2577     #
2578     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2579         Xn = EnsembleOfBackgroundPerturbations( Xb, None, __m )
2580         selfA.StoredVariables["Analysis"].store( Xb )
2581         if selfA._toStore("APosterioriCovariance"):
2582             if hasattr(B,"asfullmatrix"):
2583                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2584             else:
2585                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2586         selfA._setInternalState("seed", numpy.random.get_state())
2587     elif selfA._parameters["nextStep"]:
2588         Xn = selfA._getInternalState("Xn")
2589     #
2590     previousJMinimum = numpy.finfo(float).max
2591     #
2592     for step in range(duration-1):
2593         numpy.random.set_state(selfA._getInternalState("seed"))
2594         if hasattr(Y,"store"):
2595             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2596         else:
2597             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2598         #
2599         if U is not None:
2600             if hasattr(U,"store") and len(U)>1:
2601                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2602             elif hasattr(U,"store") and len(U)==1:
2603                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2604             else:
2605                 Un = numpy.asmatrix(numpy.ravel( U )).T
2606         else:
2607             Un = None
2608         #
2609         if selfA._parameters["InflationType"] == "MultiplicativeOnBackgroundAnomalies":
2610             Xn = CovarianceInflation( Xn,
2611                 selfA._parameters["InflationType"],
2612                 selfA._parameters["InflationFactor"],
2613                 )
2614         #
2615         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2616             EMX = M( [(Xn[:,i], Un) for i in range(__m)],
2617                 argsAsSerie = True,
2618                 returnSerieAsArrayMatrix = True )
2619             Xn_predicted = EnsemblePerturbationWithGivenCovariance( EMX, Q )
2620             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2621                 argsAsSerie = True,
2622                 returnSerieAsArrayMatrix = True )
2623             if Cm is not None and Un is not None: # Warning: if Cm is also inside M, it is counted twice!
2624                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2625                 Xn_predicted = Xn_predicted + Cm * Un
2626         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2627             # ---> By principle, M = Id, Q = 0
2628             Xn_predicted = EMX = Xn
2629             HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
2630                 argsAsSerie = True,
2631                 returnSerieAsArrayMatrix = True )
2632         #
2633         # Mean of forecast and observation of forecast
2634         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2635         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2636         #
2637         #--------------------------
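        # Note (added): both variants below implement the stochastic ("perturbed
        # observations") EnKF analysis: each member is updated with an observation
        # perturbed by a random draw from R, so that the analysis ensemble spread
        # stays statistically consistent with the a posteriori covariance.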
2638         if VariantM == "KalmanFilterFormula05":
2639             PfHT, HPfHT = 0., 0.
2640             for i in range(__m):
2641                 Exfi = Xn_predicted[:,i].reshape((__n,1)) - Xfm
2642                 Eyfi = HX_predicted[:,i].reshape((__p,1)) - Hfm
2643                 PfHT  += Exfi * Eyfi.T
2644                 HPfHT += Eyfi * Eyfi.T
2645             PfHT  = (1./(__m-1)) * PfHT
2646             HPfHT = (1./(__m-1)) * HPfHT
2647             Kn     = PfHT * ( R + HPfHT ).I
2648             del PfHT, HPfHT
2649             #
2650             for i in range(__m):
2651                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
2652                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
2653         #--------------------------
2654         elif VariantM == "KalmanFilterFormula16":
2655             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
2656             EpYm  = EpY.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
2657             #
2658             EaX   = EnsembleOfAnomalies( Xn_predicted ) / math.sqrt(__m-1)
2659             EaY = (HX_predicted - Hfm - EpY + EpYm) / math.sqrt(__m-1)
2660             #
2661             Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
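            # Note (added): K = EaX EaY' (EaY EaY')^{-1} is the ensemble estimate of
            # Pf H' (H Pf H' + R)^{-1}, the observation perturbations being carried
            # by EpY inside the normalized anomalies EaY.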
2662             #
2663             for i in range(__m):
2664                 Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
2665         #--------------------------
2666         else:
2667             raise ValueError("VariantM has to be chosen in the authorized methods list.")
2668         #
2669         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
2670             Xn = CovarianceInflation( Xn,
2671                 selfA._parameters["InflationType"],
2672                 selfA._parameters["InflationFactor"],
2673                 )
2674         #
2675         Xa = Xn.mean(axis=1, dtype=mfp).astype('float').reshape((__n,1))
2676         #--------------------------
2677         selfA._setInternalState("Xn", Xn)
2678         selfA._setInternalState("seed", numpy.random.get_state())
2679         #--------------------------
2680         #
2681         if selfA._parameters["StoreInternalVariables"] \
2682             or selfA._toStore("CostFunctionJ") \
2683             or selfA._toStore("CostFunctionJb") \
2684             or selfA._toStore("CostFunctionJo") \
2685             or selfA._toStore("APosterioriCovariance") \
2686             or selfA._toStore("InnovationAtCurrentAnalysis") \
2687             or selfA._toStore("SimulatedObservationAtCurrentAnalysis") \
2688             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2689             _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
2690             _Innovation = Ynpu - _HXa
2691         #
2692         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2693         # ---> with analysis
2694         selfA.StoredVariables["Analysis"].store( Xa )
2695         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2696             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
2697         if selfA._toStore("InnovationAtCurrentAnalysis"):
2698             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2699         # ---> with current state
2700         if selfA._parameters["StoreInternalVariables"] \
2701             or selfA._toStore("CurrentState"):
2702             selfA.StoredVariables["CurrentState"].store( Xn )
2703         if selfA._toStore("ForecastState"):
2704             selfA.StoredVariables["ForecastState"].store( EMX )
2705         if selfA._toStore("ForecastCovariance"):
2706             selfA.StoredVariables["ForecastCovariance"].store( EnsembleErrorCovariance(EMX) )
2707         if selfA._toStore("BMA"):
2708             selfA.StoredVariables["BMA"].store( EMX - Xa )
2709         if selfA._toStore("InnovationAtCurrentState"):
2710             selfA.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
2711         if selfA._toStore("SimulatedObservationAtCurrentState") \
2712             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2713             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
2714         # ---> others
2715         if selfA._parameters["StoreInternalVariables"] \
2716             or selfA._toStore("CostFunctionJ") \
2717             or selfA._toStore("CostFunctionJb") \
2718             or selfA._toStore("CostFunctionJo") \
2719             or selfA._toStore("CurrentOptimum") \
2720             or selfA._toStore("APosterioriCovariance"):
2721             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2722             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2723             J   = Jb + Jo
2724             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2725             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2726             selfA.StoredVariables["CostFunctionJ" ].store( J )
2727             #
2728             if selfA._toStore("IndexOfOptimum") \
2729                 or selfA._toStore("CurrentOptimum") \
2730                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2731                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2732                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2733                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2734                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2735             if selfA._toStore("IndexOfOptimum"):
2736                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2737             if selfA._toStore("CurrentOptimum"):
2738                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2739             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2740                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2741             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2742                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2743             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2744                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2745             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2746                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2747         if selfA._toStore("APosterioriCovariance"):
2748             selfA.StoredVariables["APosterioriCovariance"].store( EnsembleErrorCovariance(Xn) )
2749         if selfA._parameters["EstimationOf"] == "Parameters" \
2750             and J < previousJMinimum:
2751             previousJMinimum    = J
2752             XaMin               = Xa
2753             if selfA._toStore("APosterioriCovariance"):
2754                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2755     #
2756     # Additional final storage of the optimum for parameter estimation
2757     # ----------------------------------------------------------------------
2758     if selfA._parameters["EstimationOf"] == "Parameters":
2759         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2760         selfA.StoredVariables["Analysis"].store( XaMin )
2761         if selfA._toStore("APosterioriCovariance"):
2762             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2763         if selfA._toStore("BMA"):
2764             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2765     #
2766     return 0
2767
2768 # ==============================================================================
2769 def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2770     """
2771     Standard Kalman Filter
2772     """
2773     if selfA._parameters["EstimationOf"] == "Parameters":
2774         selfA._parameters["StoreInternalVariables"] = True
2775     #
2776     # Operators
2777     # ----------
2778     Ht = HO["Tangent"].asMatrix(Xb)
2779     Ha = HO["Adjoint"].asMatrix(Xb)
2780     #
2781     if selfA._parameters["EstimationOf"] == "State":
2782         Mt = EM["Tangent"].asMatrix(Xb)
2783         Ma = EM["Adjoint"].asMatrix(Xb)
2784     #
2785     if CM is not None and "Tangent" in CM and U is not None:
2786         Cm = CM["Tangent"].asMatrix(Xb)
2787     else:
2788         Cm = None
2789     #
2790     # Observation duration and sizes
2791     if hasattr(Y,"stepnumber"):
2792         duration = Y.stepnumber()
2793         __p = numpy.cumprod(Y.shape())[-1]
2794     else:
2795         duration = 2
2796         __p = numpy.array(Y).size
2797     #
2798     # Precompute the inverses of B and R
2799     if selfA._parameters["StoreInternalVariables"] \
2800         or selfA._toStore("CostFunctionJ") \
2801         or selfA._toStore("CostFunctionJb") \
2802         or selfA._toStore("CostFunctionJo") \
2803         or selfA._toStore("CurrentOptimum") \
2804         or selfA._toStore("APosterioriCovariance"):
2805         BI = B.getI()
2806         RI = R.getI()
2807     #
2808     __n = Xb.size
2809     #
2810     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
2811         Xn = Xb
2812         Pn = B
2813         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2814         selfA.StoredVariables["Analysis"].store( Xb )
2815         if selfA._toStore("APosterioriCovariance"):
2816             if hasattr(B,"asfullmatrix"):
2817                 selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
2818             else:
2819                 selfA.StoredVariables["APosterioriCovariance"].store( B )
2820         selfA._setInternalState("seed", numpy.random.get_state())
2821     elif selfA._parameters["nextStep"]:
2822         Xn = selfA._getInternalState("Xn")
2823         Pn = selfA._getInternalState("Pn")
2824     #
2825     if selfA._parameters["EstimationOf"] == "Parameters":
2826         XaMin            = Xn
2827         previousJMinimum = numpy.finfo(float).max
2828     #
2829     for step in range(duration-1):
2830         if hasattr(Y,"store"):
2831             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
2832         else:
2833             Ynpu = numpy.ravel( Y ).reshape((__p,1))
2834         #
2835         if U is not None:
2836             if hasattr(U,"store") and len(U)>1:
2837                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
2838             elif hasattr(U,"store") and len(U)==1:
2839                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
2840             else:
2841                 Un = numpy.asmatrix(numpy.ravel( U )).T
2842         else:
2843             Un = None
2844         #
2845         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
2846             Xn_predicted = Mt * Xn
2847             if Cm is not None and Un is not None: # Warning: if Cm is also inside M, it is counted twice!
2848                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
2849                 Xn_predicted = Xn_predicted + Cm * Un
2850             Pn_predicted = Q + Mt * Pn * Ma
2851         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
2852             # ---> By principle, M = Id, Q = 0
2853             Xn_predicted = Xn
2854             Pn_predicted = Pn
2855         #
2856         if selfA._parameters["EstimationOf"] == "State":
2857             HX_predicted = Ht * Xn_predicted
2858             _Innovation  = Ynpu - HX_predicted
2859         elif selfA._parameters["EstimationOf"] == "Parameters":
2860             HX_predicted = Ht * Xn_predicted
2861             _Innovation  = Ynpu - HX_predicted
2862             if Cm is not None and Un is not None: # Warning: if Cm is also inside H, it is counted twice!
2863                 _Innovation = _Innovation - Cm * Un
2864         #
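        # Note (added): standard Kalman analysis step,
        #   K  = Pf H' (H Pf H' + R)^{-1}
        #   Xa = Xf + K (Y - H Xf)
        #   Pa = (I - K H) Pf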
2865         Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
2866         Xn = Xn_predicted + Kn * _Innovation
2867         Pn = Pn_predicted - Kn * Ht * Pn_predicted
2868         #
2869         Xa = Xn # Pointers
2870         #--------------------------
2871         selfA._setInternalState("Xn", Xn)
2872         selfA._setInternalState("Pn", Pn)
2873         #--------------------------
2874         #
2875         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2876         # ---> with analysis
2877         selfA.StoredVariables["Analysis"].store( Xa )
2878         if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
2879             selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
2880         if selfA._toStore("InnovationAtCurrentAnalysis"):
2881             selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
2882         # ---> with current state
2883         if selfA._parameters["StoreInternalVariables"] \
2884             or selfA._toStore("CurrentState"):
2885             selfA.StoredVariables["CurrentState"].store( Xn )
2886         if selfA._toStore("ForecastState"):
2887             selfA.StoredVariables["ForecastState"].store( Xn_predicted )
2888         if selfA._toStore("ForecastCovariance"):
2889             selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
2890         if selfA._toStore("BMA"):
2891             selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
2892         if selfA._toStore("InnovationAtCurrentState"):
2893             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
2894         if selfA._toStore("SimulatedObservationAtCurrentState") \
2895             or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2896             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
2897         # ---> others
2898         if selfA._parameters["StoreInternalVariables"] \
2899             or selfA._toStore("CostFunctionJ") \
2900             or selfA._toStore("CostFunctionJb") \
2901             or selfA._toStore("CostFunctionJo") \
2902             or selfA._toStore("CurrentOptimum") \
2903             or selfA._toStore("APosterioriCovariance"):
2904             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
2905             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
2906             J   = Jb + Jo
2907             selfA.StoredVariables["CostFunctionJb"].store( Jb )
2908             selfA.StoredVariables["CostFunctionJo"].store( Jo )
2909             selfA.StoredVariables["CostFunctionJ" ].store( J )
2910             #
2911             if selfA._toStore("IndexOfOptimum") \
2912                 or selfA._toStore("CurrentOptimum") \
2913                 or selfA._toStore("CostFunctionJAtCurrentOptimum") \
2914                 or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
2915                 or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
2916                 or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2917                 IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
2918             if selfA._toStore("IndexOfOptimum"):
2919                 selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
2920             if selfA._toStore("CurrentOptimum"):
2921                 selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
2922             if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
2923                 selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
2924             if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
2925                 selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
2926             if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
2927                 selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
2928             if selfA._toStore("CostFunctionJAtCurrentOptimum"):
2929                 selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
2930         if selfA._toStore("APosterioriCovariance"):
2931             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
2932         if selfA._parameters["EstimationOf"] == "Parameters" \
2933             and J < previousJMinimum:
2934             previousJMinimum    = J
2935             XaMin               = Xa
2936             if selfA._toStore("APosterioriCovariance"):
2937                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
2938     #
2939     # Additional final storage of the optimum for parameter estimation
2940     # ----------------------------------------------------------------------
2941     if selfA._parameters["EstimationOf"] == "Parameters":
2942         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
2943         selfA.StoredVariables["Analysis"].store( XaMin )
2944         if selfA._toStore("APosterioriCovariance"):
2945             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
2946         if selfA._toStore("BMA"):
2947             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
2948     #
2949     return 0
2950
2951 # ==============================================================================
2952 def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
2953     """
2954     3DVAR
2955     """
2956     #
2957     # Initializations
2958     # ---------------
2959     #
2960     # Operators
2961     Hm = HO["Direct"].appliedTo
2962     Ha = HO["Adjoint"].appliedInXTo
2963     #
2964     # Optional use of a precomputed H(Xb) vector
2965     if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
2966         HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
2967     else:
2968         HXb = Hm( Xb )
2969     HXb = numpy.asmatrix(numpy.ravel( HXb )).T
2970     if Y.size != HXb.size:
2971         raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
2972     if max(Y.shape) != max(HXb.shape):
2973         raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
2974     #
2975     if selfA._toStore("JacobianMatrixAtBackground"):
2976         HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
2977         HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
2978         selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
2979     #
2980     # Precompute the inverses of B and R
2981     BI = B.getI()
2982     RI = R.getI()
2983     #
2984     # Starting point of the optimization
2985     Xini = selfA._parameters["InitializationPoint"]
2986     #
2987     # Definition of the cost function
2988     # ------------------------------
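    # Note (added): the function below is the classical 3DVAR criterion
    #   J(x) = 1/2 (x - Xb)' B^{-1} (x - Xb) + 1/2 (Y - H(x))' R^{-1} (Y - H(x)),
    # whose gradient B^{-1}(x - Xb) - H' R^{-1}(Y - H(x)) is provided separately below.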
2989     def CostFunction(x):
2990         _X  = numpy.asmatrix(numpy.ravel( x )).T
2991         if selfA._parameters["StoreInternalVariables"] or \
2992             selfA._toStore("CurrentState") or \
2993             selfA._toStore("CurrentOptimum"):
2994             selfA.StoredVariables["CurrentState"].store( _X )
2995         _HX = Hm( _X )
2996         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
2997         _Innovation = Y - _HX
2998         if selfA._toStore("SimulatedObservationAtCurrentState") or \
2999             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3000             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3001         if selfA._toStore("InnovationAtCurrentState"):
3002             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3003         #
3004         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3005         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3006         J   = Jb + Jo
3007         #
3008         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3009         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3010         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3011         selfA.StoredVariables["CostFunctionJ" ].store( J )
3012         if selfA._toStore("IndexOfOptimum") or \
3013             selfA._toStore("CurrentOptimum") or \
3014             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3015             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3016             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3017             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3018             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3019         if selfA._toStore("IndexOfOptimum"):
3020             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3021         if selfA._toStore("CurrentOptimum"):
3022             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3023         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3024             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3025         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3026             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3027         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3028             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3029         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3030             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3031         return J
3032     #
3033     def GradientOfCostFunction(x):
3034         _X      = numpy.asmatrix(numpy.ravel( x )).T
3035         _HX     = Hm( _X )
3036         _HX     = numpy.asmatrix(numpy.ravel( _HX )).T
3037         GradJb  = BI * (_X - Xb)
3038         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3039         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3040         return GradJ
3041     #
3042     # Minimization of the functional
3043     # --------------------------------
3044     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3045     #
3046     if selfA._parameters["Minimizer"] == "LBFGSB":
3047         if "0.19" <= scipy.version.version <= "1.1.0":
3048             import lbfgsbhlt as optimiseur
3049         else:
3050             import scipy.optimize as optimiseur
3051         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3052             func        = CostFunction,
3053             x0          = Xini,
3054             fprime      = GradientOfCostFunction,
3055             args        = (),
3056             bounds      = selfA._parameters["Bounds"],
3057             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3058             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3059             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3060             iprint      = selfA._parameters["optiprint"],
3061             )
3062         nfeval = Informations['funcalls']
3063         rc     = Informations['warnflag']
3064     elif selfA._parameters["Minimizer"] == "TNC":
3065         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3066             func        = CostFunction,
3067             x0          = Xini,
3068             fprime      = GradientOfCostFunction,
3069             args        = (),
3070             bounds      = selfA._parameters["Bounds"],
3071             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3072             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3073             ftol        = selfA._parameters["CostDecrementTolerance"],
3074             messages    = selfA._parameters["optmessages"],
3075             )
3076     elif selfA._parameters["Minimizer"] == "CG":
3077         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3078             f           = CostFunction,
3079             x0          = Xini,
3080             fprime      = GradientOfCostFunction,
3081             args        = (),
3082             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3083             gtol        = selfA._parameters["GradientNormTolerance"],
3084             disp        = selfA._parameters["optdisp"],
3085             full_output = True,
3086             )
3087     elif selfA._parameters["Minimizer"] == "NCG":
3088         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3089             f           = CostFunction,
3090             x0          = Xini,
3091             fprime      = GradientOfCostFunction,
3092             args        = (),
3093             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3094             avextol     = selfA._parameters["CostDecrementTolerance"],
3095             disp        = selfA._parameters["optdisp"],
3096             full_output = True,
3097             )
3098     elif selfA._parameters["Minimizer"] == "BFGS":
3099         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3100             f           = CostFunction,
3101             x0          = Xini,
3102             fprime      = GradientOfCostFunction,
3103             args        = (),
3104             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3105             gtol        = selfA._parameters["GradientNormTolerance"],
3106             disp        = selfA._parameters["optdisp"],
3107             full_output = True,
3108             )
3109     else:
3110         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3111     #
3112     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3113     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3114     #
3115     # Correction to work around a TNC bug on the returned Minimum
3116     # ----------------------------------------------------------------
3117     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3118         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3119     #
3120     # Retrieval of the analysis
3121     # ----------------------
3122     Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
3123     #
3124     selfA.StoredVariables["Analysis"].store( Xa )
3125     #
3126     if selfA._toStore("OMA") or \
3127         selfA._toStore("SigmaObs2") or \
3128         selfA._toStore("SimulationQuantiles") or \
3129         selfA._toStore("SimulatedObservationAtOptimum"):
3130         if selfA._toStore("SimulatedObservationAtCurrentState"):
3131             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3132         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3133             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3134         else:
3135             HXa = Hm( Xa )
3136     #
3137     # Computation of the analysis covariance
3138     # ---------------------------------
3139     if selfA._toStore("APosterioriCovariance") or \
3140         selfA._toStore("SimulationQuantiles") or \
3141         selfA._toStore("JacobianMatrixAtOptimum") or \
3142         selfA._toStore("KalmanGainAtOptimum"):
3143         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3144         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3145     if selfA._toStore("APosterioriCovariance") or \
3146         selfA._toStore("SimulationQuantiles") or \
3147         selfA._toStore("KalmanGainAtOptimum"):
3148         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3149         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
3150     if selfA._toStore("APosterioriCovariance") or \
3151         selfA._toStore("SimulationQuantiles"):
3152         HessienneI = []
3153         nb = Xa.size
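        # Note (added): as in the PSAS case, the a posteriori covariance A is the
        # inverse of the cost function Hessian B^{-1} + H' R^{-1} H, assembled here
        # column by column from the tangent and adjoint observation operators.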
3154         for i in range(nb):
3155             _ee    = numpy.matrix(numpy.zeros(nb)).T
3156             _ee[i] = 1.
3157             _HtEE  = numpy.dot(HtM,_ee)
3158             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
3159             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
3160         HessienneI = numpy.matrix( HessienneI )
3161         A = HessienneI.I
3162         if min(A.shape) != max(A.shape):
3163             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
3164         if (numpy.diag(A) < 0).any():
3165             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
3166         if logging.getLogger().level < logging.WARNING: # The check is only performed in debug mode
3167             try:
3168                 L = numpy.linalg.cholesky( A )
3169             except:
3170                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
3171     if selfA._toStore("APosterioriCovariance"):
3172         selfA.StoredVariables["APosterioriCovariance"].store( A )
3173     if selfA._toStore("JacobianMatrixAtOptimum"):
3174         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3175     if selfA._toStore("KalmanGainAtOptimum"):
3176         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3177         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3178         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3179     #
3180     # Additional computations and/or storage
3181     # ---------------------------------------
3182     if selfA._toStore("Innovation") or \
3183         selfA._toStore("SigmaObs2") or \
3184         selfA._toStore("MahalanobisConsistency") or \
3185         selfA._toStore("OMB"):
3186         d  = Y - HXb
3187     if selfA._toStore("Innovation"):
3188         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
3189     if selfA._toStore("BMA"):
3190         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3191     if selfA._toStore("OMA"):
3192         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3193     if selfA._toStore("OMB"):
3194         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
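         # SigmaObs2 below is an a posteriori estimate of the observation error
         # variance, (y - H xb)^T (y - H xa) / Tr(R).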
3195     if selfA._toStore("SigmaObs2"):
3196         TraceR = R.trace(Y.size)
3197         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
3198     if selfA._toStore("MahalanobisConsistency"):
3199         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3200     if selfA._toStore("SimulationQuantiles"):
3201         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3202     if selfA._toStore("SimulatedObservationAtBackground"):
3203         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
3204     if selfA._toStore("SimulatedObservationAtOptimum"):
3205         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
3206     #
3207     return 0
3208
3209 # ==============================================================================
3210 def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3211     """
3212     4DVAR
3213     """
3214     #
3215     # Initialisations
3216     # ---------------
3217     #
3218     # Opérateurs
3219     Hm = HO["Direct"].appliedControledFormTo
3220     Mm = EM["Direct"].appliedControledFormTo
3221     #
3222     if CM is not None and "Tangent" in CM and U is not None:
3223         Cm = CM["Tangent"].asMatrix(Xb)
3224     else:
3225         Cm = None
3226     #
3227     def Un(_step):
3228         if U is not None:
3229             if hasattr(U,"store") and 1<=_step<len(U) :
3230                 _Un = numpy.asmatrix(numpy.ravel( U[_step] )).T
3231             elif hasattr(U,"store") and len(U)==1:
3232                 _Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3233             else:
3234                 _Un = numpy.asmatrix(numpy.ravel( U )).T
3235         else:
3236             _Un = None
3237         return _Un
3238     def CmUn(_xn,_un):
3239         if Cm is not None and _un is not None: # Caution: if Cm is also included in M, this is counted twice!
3240             _Cm   = Cm.reshape(_xn.size,_un.size) # ADAO & check shape
3241             _CmUn = _Cm * _un
3242         else:
3243             _CmUn = 0.
3244         return _CmUn
3245     #
3246     # Remark: the observations are used starting from time step number 1,
3247     # and they are stored in Yo indexed according to these step numbers.
3248     # Step 0 is therefore not used, since the first stage starts with the
3249     # observation of step 1.
3250     #
3251     # Number of steps equal to the number of observation steps
3252     if hasattr(Y,"stepnumber"):
3253         duration = Y.stepnumber()
3254     else:
3255         duration = 2
3256     #
3257     # Precomputation of the inverses of B and R
3258     BI = B.getI()
3259     RI = R.getI()
3260     #
3261     # Starting point of the optimization
3262     Xini = selfA._parameters["InitializationPoint"]
3263     #
3264     # Definition of the cost function
3265     # ------------------------------
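         # The functional minimized below is the classical strong-constraint 4DVAR cost
         #   J(x) = 0.5*(x-Xb)^T B^-1 (x-Xb)
         #        + 0.5*sum_k (y_k - H(x_k))^T R^-1 (y_k - H(x_k)),
         # where x_k is the state propagated by the evolution model M up to step k
         # (for "Parameters" estimation the evolution reduces to the identity).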
3266     selfA.DirectCalculation = [None,] # Step 0 is not observed
3267     selfA.DirectInnovation  = [None,] # Step 0 is not observed
3268     def CostFunction(x):
3269         _X  = numpy.asmatrix(numpy.ravel( x )).T
3270         if selfA._parameters["StoreInternalVariables"] or \
3271             selfA._toStore("CurrentState") or \
3272             selfA._toStore("CurrentOptimum"):
3273             selfA.StoredVariables["CurrentState"].store( _X )
3274         Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
3275         selfA.DirectCalculation = [None,]
3276         selfA.DirectInnovation  = [None,]
3277         Jo  = 0.
3278         _Xn = _X
3279         for step in range(0,duration-1):
3280             if hasattr(Y,"store"):
3281                 _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
3282             else:
3283                 _Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
3284             _Un = Un(step)
3285             #
3286             # Evolution step
3287             if selfA._parameters["EstimationOf"] == "State":
3288                 _Xn = Mm( (_Xn, _Un) ) + CmUn(_Xn, _Un)
3289             elif selfA._parameters["EstimationOf"] == "Parameters":
3290                 pass
3291             #
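                 # Optional projection of the propagated state onto the bounds (componentwise clipping)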
3292             if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3293                 _Xn = numpy.max(numpy.hstack((_Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3294                 _Xn = numpy.min(numpy.hstack((_Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3295             #
3296             # Departure from the observations
3297             if selfA._parameters["EstimationOf"] == "State":
3298                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
3299             elif selfA._parameters["EstimationOf"] == "Parameters":
3300                 _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
3301             #
3302             # Storage of the state
3303             selfA.DirectCalculation.append( _Xn )
3304             selfA.DirectInnovation.append( _YmHMX )
3305             #
3306             # Contribution to the observation part of the functional
3307             Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
3308         J = Jb + Jo
3309         #
3310         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3311         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3312         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3313         selfA.StoredVariables["CostFunctionJ" ].store( J )
3314         if selfA._toStore("IndexOfOptimum") or \
3315             selfA._toStore("CurrentOptimum") or \
3316             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3317             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3318             selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3319             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3320         if selfA._toStore("IndexOfOptimum"):
3321             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3322         if selfA._toStore("CurrentOptimum"):
3323             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3324         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3325             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3326         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3327             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3328         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3329             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3330         return J
3331     #
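         # The gradient of Jo is obtained by a backward (adjoint) sweep over the
         # trajectory stored in DirectCalculation/DirectInnovation during the forward
         # evaluation of J: at each step the observation adjoint term H^T R^-1 d_k is
         # added, and the accumulated vector is propagated backwards by the model adjoint.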
3332     def GradientOfCostFunction(x):
3333         _X      = numpy.asmatrix(numpy.ravel( x )).T
3334         GradJb  = BI * (_X - Xb)
3335         GradJo  = 0.
3336         for step in range(duration-1,0,-1):
3337             # Retrieve the last stored evolution state
3338             _Xn = selfA.DirectCalculation.pop()
3339             # Retrieve the last stored innovation
3340             _YmHMX = selfA.DirectInnovation.pop()
3341             # Computation of the adjoints
3342             Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3343             Ha = Ha.reshape(_Xn.size,_YmHMX.size) # ADAO & check shape
3344             Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = _Xn)
3345             Ma = Ma.reshape(_Xn.size,_Xn.size) # ADAO & check shape
3346             # Gradient computation via the adjoint state
3347             GradJo = GradJo + Ha * RI * _YmHMX # For a linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
3348             GradJo = Ma * GradJo               # For a linear Ma, equivalent to: Ma( (_Xn, GradJo) )
3349         GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
3350         return GradJ
3351     #
3352     # Minimization of the functional
3353     # --------------------------------
3354     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3355     #
3356     if selfA._parameters["Minimizer"] == "LBFGSB":
3357         if "0.19" <= scipy.version.version <= "1.1.0":
3358             import lbfgsbhlt as optimiseur
3359         else:
3360             import scipy.optimize as optimiseur
3361         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3362             func        = CostFunction,
3363             x0          = Xini,
3364             fprime      = GradientOfCostFunction,
3365             args        = (),
3366             bounds      = selfA._parameters["Bounds"],
3367             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3368             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3369             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3370             iprint      = selfA._parameters["optiprint"],
3371             )
3372         nfeval = Informations['funcalls']
3373         rc     = Informations['warnflag']
3374     elif selfA._parameters["Minimizer"] == "TNC":
3375         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3376             func        = CostFunction,
3377             x0          = Xini,
3378             fprime      = GradientOfCostFunction,
3379             args        = (),
3380             bounds      = selfA._parameters["Bounds"],
3381             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3382             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3383             ftol        = selfA._parameters["CostDecrementTolerance"],
3384             messages    = selfA._parameters["optmessages"],
3385             )
3386     elif selfA._parameters["Minimizer"] == "CG":
3387         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3388             f           = CostFunction,
3389             x0          = Xini,
3390             fprime      = GradientOfCostFunction,
3391             args        = (),
3392             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3393             gtol        = selfA._parameters["GradientNormTolerance"],
3394             disp        = selfA._parameters["optdisp"],
3395             full_output = True,
3396             )
3397     elif selfA._parameters["Minimizer"] == "NCG":
3398         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3399             f           = CostFunction,
3400             x0          = Xini,
3401             fprime      = GradientOfCostFunction,
3402             args        = (),
3403             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3404             avextol     = selfA._parameters["CostDecrementTolerance"],
3405             disp        = selfA._parameters["optdisp"],
3406             full_output = True,
3407             )
3408     elif selfA._parameters["Minimizer"] == "BFGS":
3409         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3410             f           = CostFunction,
3411             x0          = Xini,
3412             fprime      = GradientOfCostFunction,
3413             args        = (),
3414             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3415             gtol        = selfA._parameters["GradientNormTolerance"],
3416             disp        = selfA._parameters["optdisp"],
3417             full_output = True,
3418             )
3419     else:
3420         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3421     #
3422     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3423     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3424     #
3425     # Workaround for a TNC bug affecting the returned Minimum
3426     # ----------------------------------------------------------------
3427     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3428         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3429     #
3430     # Retrieval of the analysis
3431     # ----------------------
3432     Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
3433     #
3434     selfA.StoredVariables["Analysis"].store( Xa )
3435     #
3436     # Additional calculations and/or storage
3437     # ---------------------------------------
3438     if selfA._toStore("BMA"):
3439         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3440     #
3441     return 0
3442
3443 # ==============================================================================
3444 def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3445     """
3446     Unscented Kalman Filter
3447     """
3448     if selfA._parameters["EstimationOf"] == "Parameters":
3449         selfA._parameters["StoreInternalVariables"] = True
3450     #
3451     L     = Xb.size
3452     Alpha = selfA._parameters["Alpha"]
3453     Beta  = selfA._parameters["Beta"]
3454     if selfA._parameters["Kappa"] == 0:
3455         if selfA._parameters["EstimationOf"] == "State":
3456             Kappa = 0
3457         elif selfA._parameters["EstimationOf"] == "Parameters":
3458             Kappa = 3 - L
3459     else:
3460         Kappa = selfA._parameters["Kappa"]
3461     Lambda = float( Alpha**2 ) * ( L + Kappa ) - L
3462     Gamma  = math.sqrt( L + Lambda )
3463     #
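         # Weights of the scaled unscented transform: Wm[0] = Lambda/(L+Lambda),
         # Wc[0] = Lambda/(L+Lambda) + (1 - Alpha^2 + Beta), and
         # Wm[i] = Wc[i] = 1/(2*(L+Lambda)) for the 2L symmetric sigma points.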
3464     Ww = []
3465     Ww.append( 0. )
3466     for i in range(2*L):
3467         Ww.append( 1. / (2.*(L + Lambda)) )
3468     #
3469     Wm = numpy.array( Ww )
3470     Wm[0] = Lambda / (L + Lambda)
3471     Wc = numpy.array( Ww )
3472     Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
3473     #
3474     # Opérateurs
3475     Hm = HO["Direct"].appliedControledFormTo
3476     #
3477     if selfA._parameters["EstimationOf"] == "State":
3478         Mm = EM["Direct"].appliedControledFormTo
3479     #
3480     if CM is not None and "Tangent" in CM and U is not None:
3481         Cm = CM["Tangent"].asMatrix(Xb)
3482     else:
3483         Cm = None
3484     #
3485     # Observation duration and sizes
3486     if hasattr(Y,"stepnumber"):
3487         duration = Y.stepnumber()
3488         __p = numpy.cumprod(Y.shape())[-1]
3489     else:
3490         duration = 2
3491         __p = numpy.array(Y).size
3492     #
3493     # Precomputation of the inverses of B and R
3494     if selfA._parameters["StoreInternalVariables"] \
3495         or selfA._toStore("CostFunctionJ") \
3496         or selfA._toStore("CostFunctionJb") \
3497         or selfA._toStore("CostFunctionJo"):
3498         BI = B.getI()
3499         RI = R.getI()
3500     #
3501     __n = Xb.size
3502     #
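         # Either start from the background (first call), or resume from the internal
         # state (Xn, Pn) saved at the previous call when "nextStep" is active.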
3503     if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
3504         Xn = Xb
3505         if hasattr(B,"asfullmatrix"):
3506             Pn = B.asfullmatrix(__n)
3507         else:
3508             Pn = B
3509         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3510         selfA.StoredVariables["Analysis"].store( Xb )
3511         if selfA._toStore("APosterioriCovariance"):
3512             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3513     elif selfA._parameters["nextStep"]:
3514         Xn = selfA._getInternalState("Xn")
3515         Pn = selfA._getInternalState("Pn")
3516     #
3517     if selfA._parameters["EstimationOf"] == "Parameters":
3518         XaMin            = Xn
3519         previousJMinimum = numpy.finfo(float).max
3520     #
3521     for step in range(duration-1):
3522         if hasattr(Y,"store"):
3523             Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
3524         else:
3525             Ynpu = numpy.ravel( Y ).reshape((__p,1))
3526         #
3527         if U is not None:
3528             if hasattr(U,"store") and len(U)>1:
3529                 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
3530             elif hasattr(U,"store") and len(U)==1:
3531                 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
3532             else:
3533                 Un = numpy.asmatrix(numpy.ravel( U )).T
3534         else:
3535             Un = None
3536         #
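             # Generation of the 2n+1 sigma points: the current mean plus/minus Gamma
             # times the columns of the Cholesky factor of Pn.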
3537         Pndemi = numpy.linalg.cholesky(Pn)
3538         Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
3539         nbSpts = 2*Xn.size+1
3540         #
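             # If constrained estimation is requested, each sigma point is clipped
             # componentwise into the bounds.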
3541         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3542             for point in range(nbSpts):
3543                 Xnp[:,point] = numpy.max(numpy.hstack((Xnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3544                 Xnp[:,point] = numpy.min(numpy.hstack((Xnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3545         #
3546         XEtnnp = []
3547         for point in range(nbSpts):
3548             if selfA._parameters["EstimationOf"] == "State":
3549                 XEtnnpi = numpy.asmatrix(numpy.ravel( Mm( (Xnp[:,point], Un) ) )).T
3550                 if Cm is not None and Un is not None: # Caution: if Cm is also included in M, this is counted twice!
3551                     Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
3552                     XEtnnpi = XEtnnpi + Cm * Un
3553                 if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3554                     XEtnnpi = numpy.max(numpy.hstack((XEtnnpi,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3555                     XEtnnpi = numpy.min(numpy.hstack((XEtnnpi,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3556             elif selfA._parameters["EstimationOf"] == "Parameters":
3557                 # --- > By principle, M = Id, Q = 0
3558                 XEtnnpi = Xnp[:,point]
3559             XEtnnp.append( XEtnnpi )
3560         XEtnnp = numpy.hstack( XEtnnp )
3561         #
3562         Xncm = numpy.matrix( XEtnnp.getA()*numpy.array(Wm) ).sum(axis=1)
3563         #
3564         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3565             Xncm = numpy.max(numpy.hstack((Xncm,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3566             Xncm = numpy.min(numpy.hstack((Xncm,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3567         #
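             # Forecast covariance rebuilt from the propagated sigma points:
             # Pnm = Q + sum_i Wc[i]*(XEtnnp[:,i]-Xncm)*(XEtnnp[:,i]-Xncm)^T
             # (the model error Q is omitted for parameter estimation).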
3568         if selfA._parameters["EstimationOf"] == "State":        Pnm = Q
3569         elif selfA._parameters["EstimationOf"] == "Parameters": Pnm = 0.
3570         for point in range(nbSpts):
3571             Pnm += Wc[point] * (XEtnnp[:,point]-Xncm) * (XEtnnp[:,point]-Xncm).T
3572         #
3573         if selfA._parameters["EstimationOf"] == "Parameters" and selfA._parameters["Bounds"] is not None:
3574             Pnmdemi = selfA._parameters["Reconditioner"] * numpy.linalg.cholesky(Pnm)
3575         else:
3576             Pnmdemi = numpy.linalg.cholesky(Pnm)
3577         #
3578         Xnnp = numpy.hstack([Xncm, Xncm+Gamma*Pnmdemi, Xncm-Gamma*Pnmdemi])
3579         #
3580         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3581             for point in range(nbSpts):
3582                 Xnnp[:,point] = numpy.max(numpy.hstack((Xnnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3583                 Xnnp[:,point] = numpy.min(numpy.hstack((Xnnp[:,point],numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3584         #
3585         Ynnp = []
3586         for point in range(nbSpts):
3587             if selfA._parameters["EstimationOf"] == "State":
3588                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], None) ) )).T
3589             elif selfA._parameters["EstimationOf"] == "Parameters":
3590                 Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], Un) ) )).T
3591             Ynnp.append( Ynnpi )
3592         Ynnp = numpy.hstack( Ynnp )
3593         #
3594         Yncm = numpy.matrix( Ynnp.getA()*numpy.array(Wm) ).sum(axis=1)
3595         #
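             # Innovation covariance Pyyn = R + sum_i Wc[i]*(Ynnp[:,i]-Yncm)*(Ynnp[:,i]-Yncm)^T
             # and state/observation cross covariance Pxyn, both built from the sigma points.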
3596         Pyyn = R
3597         Pxyn = 0.
3598         for point in range(nbSpts):
3599             Pyyn += Wc[point] * (Ynnp[:,point]-Yncm) * (Ynnp[:,point]-Yncm).T
3600             Pxyn += Wc[point] * (Xnnp[:,point]-Xncm) * (Ynnp[:,point]-Yncm).T
3601         #
3602         _Innovation  = Ynpu - Yncm
3603         if selfA._parameters["EstimationOf"] == "Parameters":
3604             if Cm is not None and Un is not None: # Caution: if Cm is also included in H, this is counted twice!
3605                 _Innovation = _Innovation - Cm * Un
3606         #
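             # Standard unscented Kalman correction: gain Kn = Pxyn Pyyn^-1, analysis
             # Xn = Xncm + Kn*Innovation, updated covariance Pn = Pnm - Kn Pyyn Kn^T.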
3607         Kn = Pxyn * Pyyn.I
3608         Xn = Xncm + Kn * _Innovation
3609         Pn = Pnm - Kn * Pyyn * Kn.T
3610         #
3611         if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
3612             Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
3613             Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
3614         #
3615         Xa = Xn # Pointeurs
3616         #--------------------------
3617         selfA._setInternalState("Xn", Xn)
3618         selfA._setInternalState("Pn", Pn)
3619         #--------------------------
3620         #
3621         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3622         # ---> with analysis
3623         selfA.StoredVariables["Analysis"].store( Xa )
3624         # ---> with current state
3625         if selfA._parameters["StoreInternalVariables"] \
3626             or selfA._toStore("CurrentState"):
3627             selfA.StoredVariables["CurrentState"].store( Xn )
3628         if selfA._toStore("InnovationAtCurrentState"):
3629             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3630         if selfA._parameters["StoreInternalVariables"] \
3631             or selfA._toStore("CostFunctionJ") \
3632             or selfA._toStore("CostFunctionJb") \
3633             or selfA._toStore("CostFunctionJo"):
3634             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
3635             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3636             J   = Jb + Jo
3637             selfA.StoredVariables["CostFunctionJb"].store( Jb )
3638             selfA.StoredVariables["CostFunctionJo"].store( Jo )
3639             selfA.StoredVariables["CostFunctionJ" ].store( J )
3640         if selfA._toStore("APosterioriCovariance"):
3641             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
3642         if selfA._parameters["EstimationOf"] == "Parameters" \
3643             and J < previousJMinimum:
3644             previousJMinimum    = J
3645             XaMin               = Xa
3646             if selfA._toStore("APosterioriCovariance"):
3647                 covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
3648     #
3649     # Additional final storage of the optimum in parameter estimation
3650     # ----------------------------------------------------------------------
3651     if selfA._parameters["EstimationOf"] == "Parameters":
3652         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
3653         selfA.StoredVariables["Analysis"].store( XaMin )
3654         if selfA._toStore("APosterioriCovariance"):
3655             selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
3656         if selfA._toStore("BMA"):
3657             selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
3658     #
3659     return 0
3660
3661 # ==============================================================================
3662 def van3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
3663     """
3664     3DVAR variational analysis with no inversion of B
3665     """
3666     #
3667     # Initialisations
3668     # ---------------
3669     #
3670     # Opérateurs
3671     Hm = HO["Direct"].appliedTo
3672     Ha = HO["Adjoint"].appliedInXTo
         #
         # Simulated observation at the background, needed by the innovation and by the
         # final storage section of this function (it is not computed elsewhere here)
         HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
3673     #
3674     # Precomputation of the transpose of B and of the inverse of R
3675     BT = B.getT()
3676     RI = R.getI()
3677     #
3678     # Starting point of the optimization
3679     Xini = numpy.zeros(Xb.shape)
3680     #
3681     # Definition of the cost function
3682     # ------------------------------
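         # Change of control variable: the minimization is carried out on v, with
         # x = Xb + B v, so that Jb = 0.5*(x-Xb)^T B^-1 (x-Xb) reduces to 0.5*v^T B^T v
         # and no inversion of B is required.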
3683     def CostFunction(v):
3684         _V = numpy.asmatrix(numpy.ravel( v )).T
3685         _X = Xb + B * _V
3686         if selfA._parameters["StoreInternalVariables"] or \
3687             selfA._toStore("CurrentState") or \
3688             selfA._toStore("CurrentOptimum"):
3689             selfA.StoredVariables["CurrentState"].store( _X )
3690         _HX = Hm( _X )
3691         _HX = numpy.asmatrix(numpy.ravel( _HX )).T
3692         _Innovation = Y - _HX
3693         if selfA._toStore("SimulatedObservationAtCurrentState") or \
3694             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3695             selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
3696         if selfA._toStore("InnovationAtCurrentState"):
3697             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
3698         #
3699         Jb  = float( 0.5 * _V.T * BT * _V )
3700         Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
3701         J   = Jb + Jo
3702         #
3703         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["CostFunctionJ"]) )
3704         selfA.StoredVariables["CostFunctionJb"].store( Jb )
3705         selfA.StoredVariables["CostFunctionJo"].store( Jo )
3706         selfA.StoredVariables["CostFunctionJ" ].store( J )
3707         if selfA._toStore("IndexOfOptimum") or \
3708             selfA._toStore("CurrentOptimum") or \
3709             selfA._toStore("CostFunctionJAtCurrentOptimum") or \
3710             selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
3711             selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
3712             selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3713             IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3714         if selfA._toStore("IndexOfOptimum"):
3715             selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
3716         if selfA._toStore("CurrentOptimum"):
3717             selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["CurrentState"][IndexMin] )
3718         if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3719             selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
3720         if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
3721             selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
3722         if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
3723             selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
3724         if selfA._toStore("CostFunctionJAtCurrentOptimum"):
3725             selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
3726         return J
3727     #
3728     def GradientOfCostFunction(v):
3729         _V = numpy.asmatrix(numpy.ravel( v )).T
3730         _X = Xb + B * _V
3731         _HX     = Hm( _X )
3732         _HX     = numpy.asmatrix(numpy.ravel( _HX )).T
3733         GradJb  = BT * _V
3734         GradJo  = - Ha( (_X, RI * (Y - _HX)) )
3735         GradJ   = numpy.ravel( GradJb ) + numpy.ravel( GradJo )
3736         return GradJ
3737     #
3738     # Minimization of the functional
3739     # --------------------------------
3740     nbPreviousSteps = selfA.StoredVariables["CostFunctionJ"].stepnumber()
3741     #
3742     if selfA._parameters["Minimizer"] == "LBFGSB":
3743         if "0.19" <= scipy.version.version <= "1.1.0":
3744             import lbfgsbhlt as optimiseur
3745         else:
3746             import scipy.optimize as optimiseur
3747         Minimum, J_optimal, Informations = optimiseur.fmin_l_bfgs_b(
3748             func        = CostFunction,
3749             x0          = Xini,
3750             fprime      = GradientOfCostFunction,
3751             args        = (),
3752             bounds      = selfA._parameters["Bounds"],
3753             maxfun      = selfA._parameters["MaximumNumberOfSteps"]-1,
3754             factr       = selfA._parameters["CostDecrementTolerance"]*1.e14,
3755             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3756             iprint      = selfA._parameters["optiprint"],
3757             )
3758         nfeval = Informations['funcalls']
3759         rc     = Informations['warnflag']
3760     elif selfA._parameters["Minimizer"] == "TNC":
3761         Minimum, nfeval, rc = scipy.optimize.fmin_tnc(
3762             func        = CostFunction,
3763             x0          = Xini,
3764             fprime      = GradientOfCostFunction,
3765             args        = (),
3766             bounds      = selfA._parameters["Bounds"],
3767             maxfun      = selfA._parameters["MaximumNumberOfSteps"],
3768             pgtol       = selfA._parameters["ProjectedGradientTolerance"],
3769             ftol        = selfA._parameters["CostDecrementTolerance"],
3770             messages    = selfA._parameters["optmessages"],
3771             )
3772     elif selfA._parameters["Minimizer"] == "CG":
3773         Minimum, fopt, nfeval, grad_calls, rc = scipy.optimize.fmin_cg(
3774             f           = CostFunction,
3775             x0          = Xini,
3776             fprime      = GradientOfCostFunction,
3777             args        = (),
3778             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3779             gtol        = selfA._parameters["GradientNormTolerance"],
3780             disp        = selfA._parameters["optdisp"],
3781             full_output = True,
3782             )
3783     elif selfA._parameters["Minimizer"] == "NCG":
3784         Minimum, fopt, nfeval, grad_calls, hcalls, rc = scipy.optimize.fmin_ncg(
3785             f           = CostFunction,
3786             x0          = Xini,
3787             fprime      = GradientOfCostFunction,
3788             args        = (),
3789             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3790             avextol     = selfA._parameters["CostDecrementTolerance"],
3791             disp        = selfA._parameters["optdisp"],
3792             full_output = True,
3793             )
3794     elif selfA._parameters["Minimizer"] == "BFGS":
3795         Minimum, fopt, gopt, Hopt, nfeval, grad_calls, rc = scipy.optimize.fmin_bfgs(
3796             f           = CostFunction,
3797             x0          = Xini,
3798             fprime      = GradientOfCostFunction,
3799             args        = (),
3800             maxiter     = selfA._parameters["MaximumNumberOfSteps"],
3801             gtol        = selfA._parameters["GradientNormTolerance"],
3802             disp        = selfA._parameters["optdisp"],
3803             full_output = True,
3804             )
3805     else:
3806         raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"])
3807     #
3808     IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
3809     MinJ     = selfA.StoredVariables["CostFunctionJ"][IndexMin]
3810     #
3811     # Workaround for a TNC bug affecting the returned Minimum
3812     # ----------------------------------------------------------------
3813     if selfA._parameters["StoreInternalVariables"] or selfA._toStore("CurrentState"):
3814         Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
3815         Minimum = numpy.asmatrix(numpy.ravel( Minimum )).T
3816     else:
3817         Minimum = Xb + B * numpy.asmatrix(numpy.ravel( Minimum )).T
3818     #
3819     # Retrieval of the analysis
3820     # ----------------------
3821     Xa = Minimum
3822     #
3823     selfA.StoredVariables["Analysis"].store( Xa )
3824     #
3825     if selfA._toStore("OMA") or \
3826         selfA._toStore("SigmaObs2") or \
3827         selfA._toStore("SimulationQuantiles") or \
3828         selfA._toStore("SimulatedObservationAtOptimum"):
3829         if selfA._toStore("SimulatedObservationAtCurrentState"):
3830             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
3831         elif selfA._toStore("SimulatedObservationAtCurrentOptimum"):
3832             HXa = selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
3833         else:
3834             HXa = Hm( Xa )
3835     #
3836     # Computation of the analysis error covariance
3837     # ---------------------------------
3838     if selfA._toStore("APosterioriCovariance") or \
3839         selfA._toStore("SimulationQuantiles") or \
3840         selfA._toStore("JacobianMatrixAtOptimum") or \
3841         selfA._toStore("KalmanGainAtOptimum"):
3842         HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
3843         HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
3844     if selfA._toStore("APosterioriCovariance") or \
3845         selfA._toStore("SimulationQuantiles") or \
3846         selfA._toStore("KalmanGainAtOptimum"):
3847         HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa)
3848         HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
3849     if selfA._toStore("APosterioriCovariance") or \
3850         selfA._toStore("SimulationQuantiles"):
3851         BI = B.getI()
3852         HessienneI = []
3853         nb = Xa.size
3854         for i in range(nb):
3855             _ee    = numpy.matrix(numpy.zeros(nb)).T
3856             _ee[i] = 1.
3857             _HtEE  = numpy.dot(HtM,_ee)
3858             _HtEE  = numpy.asmatrix(numpy.ravel( _HtEE )).T
3859             HessienneI.append( numpy.ravel( BI*_ee + HaM * (RI * _HtEE) ) )
3860         HessienneI = numpy.matrix( HessienneI )
3861         A = HessienneI.I
3862         if min(A.shape) != max(A.shape):
3863             raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(selfA._name,str(A.shape)))
3864         if (numpy.diag(A) < 0).any():
3865             raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(selfA._name,))
3866         if logging.getLogger().level < logging.WARNING: # The check is only done in debug mode
3867             try:
3868                 L = numpy.linalg.cholesky( A )
3869             except numpy.linalg.LinAlgError:
3870                 raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(selfA._name,))
3871     if selfA._toStore("APosterioriCovariance"):
3872         selfA.StoredVariables["APosterioriCovariance"].store( A )
3873     if selfA._toStore("JacobianMatrixAtOptimum"):
3874         selfA.StoredVariables["JacobianMatrixAtOptimum"].store( HtM )
3875     if selfA._toStore("KalmanGainAtOptimum"):
3876         if   (Y.size <= Xb.size): KG  = B * HaM * (R + numpy.dot(HtM, B * HaM)).I
3877         elif (Y.size >  Xb.size): KG = (BI + numpy.dot(HaM, RI * HtM)).I * HaM * RI
3878         selfA.StoredVariables["KalmanGainAtOptimum"].store( KG )
3879     #
3880     # Additional calculations and/or storage
3881     # ---------------------------------------
3882     if selfA._toStore("Innovation") or \
3883         selfA._toStore("SigmaObs2") or \
3884         selfA._toStore("MahalanobisConsistency") or \
3885         selfA._toStore("OMB"):
3886         d  = Y - HXb
3887     if selfA._toStore("Innovation"):
3888         selfA.StoredVariables["Innovation"].store( numpy.ravel(d) )
3889     if selfA._toStore("BMA"):
3890         selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
3891     if selfA._toStore("OMA"):
3892         selfA.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
3893     if selfA._toStore("OMB"):
3894         selfA.StoredVariables["OMB"].store( numpy.ravel(d) )
3895     if selfA._toStore("SigmaObs2"):
3896         TraceR = R.trace(Y.size)
3897         selfA.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR )
3898     if selfA._toStore("MahalanobisConsistency"):
3899         selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
3900     if selfA._toStore("SimulationQuantiles"):
3901         QuantilesEstimations(selfA, A, Xa, HXa, Hm, HtM)
3902     if selfA._toStore("SimulatedObservationAtBackground"):
3903         selfA.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
3904     if selfA._toStore("SimulatedObservationAtOptimum"):
3905         selfA.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
3906     #
3907     return 0
3908
3909 # ==============================================================================
3910 if __name__ == "__main__":
3911     print('\n AUTODIAGNOSTIC\n')