self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
if self._toStore("SimulatedObservationAtCurrentOptimum"):
self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
- if self._toStore("CostFunctionJAtCurrentOptimum"):
- self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
if self._toStore("CostFunctionJbAtCurrentOptimum"):
self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
if self._toStore("CostFunctionJoAtCurrentOptimum"):
self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+ if self._toStore("CostFunctionJAtCurrentOptimum"):
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
return J
#
def GradientOfCostFunction(x):
"APosterioriVariances",
"BMA",
"CostFunctionJ",
+ "CostFunctionJAtCurrentOptimum",
"CostFunctionJb",
+ "CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
+ "CostFunctionJoAtCurrentOptimum",
+ "CurrentOptimum",
"CurrentState",
"Innovation",
"MahalanobisConsistency",
"SigmaBck2",
"SigmaObs2",
"SimulatedObservationAtBackground",
+ "SimulatedObservationAtCurrentOptimum",
"SimulatedObservationAtCurrentState",
"SimulatedObservationAtOptimum",
"SimulationQuantiles",
# Calcul de la fonction coût
# --------------------------
if self._parameters["StoreInternalVariables"] or \
- self._toStore("CostFunctionJ") or \
+ self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+ self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+ self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
self._toStore("OMA") or \
self._toStore("SigmaObs2") or \
self._toStore("MahalanobisConsistency") or \
+ self._toStore("SimulatedObservationAtCurrentOptimum") or \
self._toStore("SimulatedObservationAtCurrentState") or \
self._toStore("SimulatedObservationAtOptimum") or \
self._toStore("SimulationQuantiles"):
HXa = Hm * Xa
oma = Y - HXa
if self._parameters["StoreInternalVariables"] or \
- self._toStore("CostFunctionJ") or \
+ self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+ self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+ self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
self._toStore("MahalanobisConsistency"):
Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
Jo = float( 0.5 * oma.T * RI * oma )
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
+ self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+ self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
#
# Calcul de la covariance d'analyse
# ---------------------------------
# ---------------------------------------
if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+ if self._toStore("CurrentOptimum"):
+ self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
if self._toStore("Innovation"):
self.StoredVariables["Innovation"].store( numpy.ravel(d) )
if self._toStore("BMA"):
self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
if self._toStore("SimulatedObservationAtCurrentState"):
self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+ if self._toStore("SimulatedObservationAtCurrentOptimum"):
+ self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
if self._toStore("SimulatedObservationAtOptimum"):
self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
#
"APosterioriStandardDeviations",
"APosterioriVariances",
"BMA",
- "OMA",
- "OMB",
- "CurrentState",
"CostFunctionJ",
+ "CostFunctionJAtCurrentOptimum",
"CostFunctionJb",
+ "CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
+ "CostFunctionJoAtCurrentOptimum",
+ "CurrentOptimum",
+ "CurrentState",
"Innovation",
+ "MahalanobisConsistency",
+ "OMA",
+ "OMB",
"SigmaBck2",
"SigmaObs2",
- "MahalanobisConsistency",
- "SimulationQuantiles",
"SimulatedObservationAtBackground",
+ "SimulatedObservationAtCurrentOptimum",
"SimulatedObservationAtCurrentState",
"SimulatedObservationAtOptimum",
+ "SimulationQuantiles",
]
)
self.defineRequiredParameter(
# Calcul de la fonction coût
# --------------------------
if self._parameters["StoreInternalVariables"] or \
- self._toStore("CostFunctionJ") or \
+ self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+ self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+ self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
self._toStore("OMA") or \
self._toStore("SigmaObs2") or \
self._toStore("MahalanobisConsistency") or \
+ self._toStore("SimulatedObservationAtCurrentOptimum") or \
self._toStore("SimulatedObservationAtCurrentState") or \
self._toStore("SimulatedObservationAtOptimum") or \
self._toStore("SimulationQuantiles"):
HXa = numpy.matrix(numpy.ravel( H( Xa ) )).T
oma = Y - HXa
if self._parameters["StoreInternalVariables"] or \
- self._toStore("CostFunctionJ") or \
+ self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+ self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+ self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
self._toStore("MahalanobisConsistency"):
Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
Jo = float( 0.5 * oma.T * RI * oma )
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
+ self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+ self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
#
# Calcul de la covariance d'analyse
# ---------------------------------
# ---------------------------------------
if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+ if self._toStore("CurrentOptimum"):
+ self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
if self._toStore("Innovation"):
self.StoredVariables["Innovation"].store( numpy.ravel(d) )
if self._toStore("BMA"):
self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
if self._toStore("SimulatedObservationAtCurrentState"):
self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+ if self._toStore("SimulatedObservationAtCurrentOptimum"):
+ self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
if self._toStore("SimulatedObservationAtOptimum"):
self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
#
default = [],
typecast = tuple,
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
- listval = ["OMA", "CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
+ listval = [
+ "CostFunctionJ",
+ "CostFunctionJAtCurrentOptimum",
+ "CostFunctionJb",
+ "CostFunctionJbAtCurrentOptimum",
+ "CostFunctionJo",
+ "CostFunctionJoAtCurrentOptimum",
+ "CurrentOptimum",
+ "CurrentState",
+ "OMA",
+ "SimulatedObservationAtCurrentOptimum",
+ "SimulatedObservationAtCurrentState",
+ "SimulatedObservationAtOptimum",
+ ]
)
self.requireInputArguments(
mandatory= ("Y", "HO", "R"),
# Calcul de la fonction coût
# --------------------------
if self._parameters["StoreInternalVariables"] or \
- self._toStore("CostFunctionJ") or \
+ self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+ self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+ self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
self._toStore("OMA") or \
+ self._toStore("SimulatedObservationAtCurrentOptimum") or \
+ self._toStore("SimulatedObservationAtCurrentState") or \
self._toStore("SimulatedObservationAtOptimum"):
HXa = Hm * Xa
oma = Y - HXa
if self._parameters["StoreInternalVariables"] or \
- self._toStore("CostFunctionJ"):
+ self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+ self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+ self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"):
Jb = 0.
- Jo = 0.5 * oma.T * RI * oma
- J = float( Jb ) + float( Jo )
+ Jo = float( 0.5 * oma.T * RI * oma )
+ J = Jb + Jo
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
+ self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+ self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
#
# Calculs et/ou stockages supplémentaires
# ---------------------------------------
if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+ if self._toStore("CurrentOptimum"):
+ self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
if self._toStore("OMA"):
self.StoredVariables["OMA"].store( numpy.ravel(oma) )
+ if self._toStore("SimulatedObservationAtBackground"):
+ self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
if self._toStore("SimulatedObservationAtCurrentState"):
self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+ if self._toStore("SimulatedObservationAtCurrentOptimum"):
+ self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
if self._toStore("SimulatedObservationAtOptimum"):
self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
#
self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
if self._toStore("SimulatedObservationAtCurrentOptimum"):
self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
- if self._toStore("CostFunctionJAtCurrentOptimum"):
- self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
if self._toStore("CostFunctionJbAtCurrentOptimum"):
self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
if self._toStore("CostFunctionJoAtCurrentOptimum"):
self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+ if self._toStore("CostFunctionJAtCurrentOptimum"):
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
return J
#
def GradientOfCostFunction(x):
SET(TEST_NAMES
Definition_complete_de_cas_3DVAR
+ Definition_complete_de_cas_Blue
)
FOREACH(tfile ${TEST_NAMES})
--- /dev/null
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2008-2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+"Verification d'un exemple de la documentation"
+
+import sys
+import unittest
+import numpy
+
+# ==============================================================================
+#
+# Construction artificielle d'un exemple de donnees utilisateur
+# -------------------------------------------------------------
+# "True" parameter values used to build the synthetic example below.
+alpha = 5.
+beta = 7
+gamma = 9.0
+#
+# Admissible bounds for each parameter (kept for the documentation example).
+alphamin, alphamax = 0., 10.
+betamin, betamax = 3, 13
+gammamin, gammamax = 1.5, 15.5
+#
+def simulation(x):
+    "Simulation operator H, computing Y = H(X) as a 4x1 column matrix."
+    import numpy
+    # Accept any layout (list, tuple, matrix, array) and flatten to a column vector.
+    __x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
+    # Linear observation operator: 4 observations of the 3 state components.
+    __H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
+    return __H * __x
+#
+def multisimulation( xserie ):
+    "Apply the simulation operator H to each state of xserie (multi-evaluation form expected by InputFunctionAsMulti)."
+    yserie = []
+    for x in xserie:
+        yserie.append( simulation( x ) )
+    return yserie
+#
+# Observations obtained by simulating the exact state (2, 3, 4)
+# -------------------------------------------------------------
+observations = simulation((2, 3, 4))
+
+# ==============================================================================
+class InTest(unittest.TestCase):
+    "Documentation example: run a Blue case through the ADAO TUI builder, then exploit the results independently."
+    def test1(self):
+        print("""Exemple de la doc :
+
+    Exploitation independante des resultats d'un cas de calcul
+    ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+    """)
+        #
+        import numpy
+        from adao import adaoBuilder
+        #
+        # Shaping of the inputs
+        # ---------------------
+        Xb = (alpha, beta, gamma)
+        # NOTE(review): Bounds is built for completeness of the example but is
+        # not passed to the case — the Blue algorithm does not use bounds.
+        Bounds = (
+            (alphamin, alphamax),
+            (betamin, betamax ),
+            (gammamin, gammamax))
+        #
+        # ADAO TUI case definition
+        # ------------------------
+        case = adaoBuilder.New()
+        case.set( 'AlgorithmParameters',
+            Algorithm = 'Blue', # Reserved keyword
+            Parameters = { # Dictionary
+                "StoreSupplementaryCalculations":[# List of reserved keywords
+                    "CostFunctionJAtCurrentOptimum",
+                    "CostFunctionJoAtCurrentOptimum",
+                    "CurrentOptimum",
+                    "SimulatedObservationAtCurrentOptimum",
+                    "SimulatedObservationAtOptimum",
+                    ],
+                }
+            )
+        case.set( 'Background',
+            Vector = numpy.array(Xb), # array, list, tuple, matrix
+            Stored = True, # Bool
+            )
+        case.set( 'Observation',
+            Vector = numpy.array(observations), # array, list, tuple, matrix
+            Stored = False, # Bool
+            )
+        case.set( 'BackgroundError',
+            Matrix = None, # None or square matrix
+            ScalarSparseMatrix = 1.0e10, # None or Real > 0
+            DiagonalSparseMatrix = None, # None or vector
+            )
+        case.set( 'ObservationError',
+            Matrix = None, # None or square matrix
+            ScalarSparseMatrix = 1.0, # None or Real > 0
+            DiagonalSparseMatrix = None, # None or vector
+            )
+        case.set( 'ObservationOperator',
+            OneFunction = multisimulation, # MultiFunction [Y] = F([X])
+            Parameters = { # Dictionary
+                "DifferentialIncrement":0.0001, # Real > 0
+                "CenteredFiniteDifference":False, # Bool
+                },
+            InputFunctionAsMulti = True, # Bool
+            )
+        case.set( 'Observer',
+            Variable = "CurrentState", # Keyword
+            Template = "ValuePrinter", # Keyword
+            String = None, # None or Python code
+            Info = None, # None or string
+
+            )
+        case.execute()
+        #
+        # Independent exploitation of the stored results
+        # ----------------------------------------------
+        Xbackground = case.get("Background")
+        Xoptimum = case.get("Analysis")[-1]
+        FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
+        J_values = case.get("CostFunctionJAtCurrentOptimum")[:]
+        print("")
+        print("Number of internal iterations...: %i"%len(J_values))
+        print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
+        print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
+        print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
+        print("")
+        #
+        # End of the case: check the optimum against the exact state
+        # ----------------------------------------------------------
+        ecart = assertAlmostEqualArrays(Xoptimum, [ 2., 3., 4.])
+        #
+        print(" L'écart absolu maximal obtenu lors du test est de %.2e."%ecart)
+        print(" Les résultats obtenus sont corrects.")
+        print("")
+        #
+        return Xoptimum
+
+# ==============================================================================
+def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
+ "Compare two vectors, like unittest.assertAlmostEqual"
+ import numpy
+ if msg is not None:
+ print(msg)
+ if delta is not None:
+ if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
+ raise AssertionError("%s != %s within %s places"%(first,second,delta))
+ else:
+ if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any():
+ raise AssertionError("%s != %s within %i places"%(first,second,places))
+ return max(abs(numpy.asarray(first) - numpy.asarray(second)))
+
+# ==============================================================================
+# Standalone entry point: print a banner and run the unittest suite.
+if __name__ == '__main__':
+    print("\nAUTODIAGNOSTIC\n==============")
+    unittest.main()