From: Jean-Philippe ARGAUD
Date: Thu, 28 Feb 2019 17:22:58 +0000 (+0100)
Subject: Adding LocalSensitivityTest algorithm
X-Git-Tag: V9_3_0rc2~34
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=4902ddfbc3d33f5e91429b91c86b3ad1be35e2c2;p=modules%2Fadao.git

Adding LocalSensitivityTest algorithm
---

diff --git a/doc/en/ref_algorithm_LocalSensitivityTest.rst b/doc/en/ref_algorithm_LocalSensitivityTest.rst
new file mode 100644
index 0000000..2186dce
--- /dev/null
+++ b/doc/en/ref_algorithm_LocalSensitivityTest.rst
@@ -0,0 +1,96 @@
+..
+   Copyright (C) 2008-2019 EDF R&D
+
+   This file is part of SALOME ADAO module.
+
+   This library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   This library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with this library; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+   See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+
+   Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+
+.. index:: single: LocalSensitivityTest
+.. _section_ref_algorithm_LocalSensitivityTest:
+
+Checking algorithm "*LocalSensitivityTest*"
+-------------------------------------------
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo00.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo01.rst
+
+This algorithm calculates the value of the Jacobian of the operator
+:math:`H` with respect to the input variables :math:`\mathbf{x}`. This
+operator appears in the relation:
+
+.. math:: \mathbf{y} = H(\mathbf{x})
+
+(see :ref:`section_theory` for further explanations). This Jacobian is the
+linearized (or tangent) operator :math:`\mathbf{H}` of :math:`H` around the
+chosen checking point.
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo02.rst
+
+.. include:: snippets/CheckingPoint.rst
+
+.. include:: snippets/Observation.rst
+
+*Remark: the observation is only used to enforce dimension checking, so one
+can give an unrealistic vector of the right size.
+Example:* ``numpy.ones()``
+
+.. include:: snippets/ObservationOperator.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo03Chck.rst
+
+.. include:: snippets/SetDebug.rst
+
+StoreSupplementaryCalculations
+  .. index:: single: StoreSupplementaryCalculations
+
+  This list indicates the names of the supplementary variables that can be
+  available at the end of the algorithm. It involves potentially costly
+  calculations or memory consumption. The default is an empty list, none of
+  these variables being calculated and stored by default. The possible names
+  are in the following list: [
+  "CurrentState",
+  "JacobianMatrixAtCurrentState",
+  "SimulatedObservationAtCurrentState",
+  ].
+
+  Example:
+  ``{"StoreSupplementaryCalculations":["CurrentState"]}``
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo04.rst
+
+.. include:: snippets/JacobianMatrixAtCurrentState.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo05.rst
+
+.. include:: snippets/CurrentState.rst
+
+.. include:: snippets/SimulatedObservationAtCurrentState.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo06.rst
+
+- :ref:`section_ref_algorithm_FunctionTest`
+- :ref:`section_ref_algorithm_GradientTest`
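
In concrete terms, the Jacobian documented above is the matrix of partial
derivatives of H at the checking point, that is, the tangent linear operator J
such that H(x + dx) is approximately H(x) + J.dx for a small increment dx. The
short sketch below is purely illustrative: it uses plain NumPy and a
hypothetical operator ``Hdirect`` (no ADAO API at all), and shows how such a
Jacobian can be approximated column by column by finite differences, which is
the kind of matrix one would compare against the result of this test::

    import numpy

    def Hdirect(x):
        "Hypothetical non-linear observation operator, for illustration only"
        return numpy.array([x[0]**2, x[0]*x[1], numpy.sin(x[2])])

    def finite_difference_jacobian(H, x, eps=1.e-6):
        "Approximate the tangent (linearized) operator of H at the point x"
        x  = numpy.asarray(x, dtype=float)
        y0 = H(x)
        J  = numpy.zeros((y0.size, x.size))
        for j in range(x.size):
            dx    = numpy.zeros_like(x)
            dx[j] = eps
            J[:, j] = (H(x + dx) - y0) / eps   # j-th column = dH/dx_j
        return J

    x_check = numpy.array([1., 2., 0.5])
    print(finite_difference_jacobian(Hdirect, x_check))
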
diff --git a/doc/fr/ref_algorithm_LocalSensitivityTest.rst b/doc/fr/ref_algorithm_LocalSensitivityTest.rst
new file mode 100644
index 0000000..7d64101
--- /dev/null
+++ b/doc/fr/ref_algorithm_LocalSensitivityTest.rst
@@ -0,0 +1,97 @@
+..
+   Copyright (C) 2008-2019 EDF R&D
+
+   This file is part of SALOME ADAO module.
+
+   This library is free software; you can redistribute it and/or
+   modify it under the terms of the GNU Lesser General Public
+   License as published by the Free Software Foundation; either
+   version 2.1 of the License, or (at your option) any later version.
+
+   This library is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+   Lesser General Public License for more details.
+
+   You should have received a copy of the GNU Lesser General Public
+   License along with this library; if not, write to the Free Software
+   Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+   See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+
+   Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+
+.. index:: single: LocalSensitivityTest
+.. _section_ref_algorithm_LocalSensitivityTest:
+
+Algorithme de vérification "*LocalSensitivityTest*"
+---------------------------------------------------
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo00.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo01.rst
+
+Cet algorithme permet d'établir la valeur de la jacobienne de l'opérateur
+:math:`H` par rapport aux variables d'entrée :math:`\mathbf{x}`. Cet opérateur
+intervient dans la relation :
+
+.. math:: \mathbf{y} = H(\mathbf{x})
+
+(voir :ref:`section_theory` pour de plus amples explications). Cette jacobienne
+est l'opérateur linéarisé (ou opérateur tangent) :math:`\mathbf{H}` de
+:math:`H` autour du point de vérification choisi.
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo02.rst
+
+.. include:: snippets/CheckingPoint.rst
+
+.. include:: snippets/Observation.rst
+
+*Remarque : l'observation n'étant utilisée que pour renforcer la vérification
+des dimensions, elle peut donc être fournie comme un vecteur non réaliste de
+la bonne taille.
+Exemple :* ``numpy.ones()``
+
+.. include:: snippets/ObservationOperator.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo03Chck.rst
+
+.. include:: snippets/SetDebug.rst
+
+StoreSupplementaryCalculations
+  .. index:: single: StoreSupplementaryCalculations
+
+  Cette liste indique les noms des variables supplémentaires qui peuvent être
+  disponibles à la fin de l'algorithme. Cela implique potentiellement des
+  calculs ou du stockage coûteux. La valeur par défaut est une liste vide,
+  aucune de ces variables n'étant calculée et stockée par défaut. Les noms
+  possibles sont dans la liste suivante : [
+  "CurrentState",
+  "JacobianMatrixAtCurrentState",
+  "SimulatedObservationAtCurrentState",
+  ].
+
+  Exemple :
+  ``{"StoreSupplementaryCalculations":["CurrentState"]}``
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo04.rst
+
+.. include:: snippets/JacobianMatrixAtCurrentState.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo05.rst
+
+.. include:: snippets/CurrentState.rst
+
+.. include:: snippets/SimulatedObservationAtCurrentState.rst
+
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo06.rst
+
+- :ref:`section_ref_algorithm_FunctionTest`
+- :ref:`section_ref_algorithm_GradientTest`
diff --git a/src/daComposant/daAlgorithms/LocalSensitivityTest.py b/src/daComposant/daAlgorithms/LocalSensitivityTest.py
new file mode 100644
index 0000000..8821124
--- /dev/null
+++ b/src/daComposant/daAlgorithms/LocalSensitivityTest.py
@@ -0,0 +1,92 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2008-2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+
+import sys, logging
+from daCore import BasicObjects, PlatformInfo
+import numpy, copy
+
+# ==============================================================================
+class ElementaryAlgorithm(BasicObjects.Algorithm):
+    def __init__(self):
+        BasicObjects.Algorithm.__init__(self, "LOCALSENSITIVITYTEST")
+        self.defineRequiredParameter(
+            name = "SetDebug",
+            default = False,
+            typecast = bool,
+            message = "Activation du mode debug lors de l'exécution",
+            )
+        self.defineRequiredParameter(
+            name = "StoreSupplementaryCalculations",
+            default = ["JacobianMatrixAtCurrentState",],
+            typecast = tuple,
+            message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
+            listval = [
+                "CurrentState",
+                "JacobianMatrixAtCurrentState",
+                "SimulatedObservationAtCurrentState",
+                ]
+            )
+        self.requireInputArguments(
+            mandatory= ("Xb", "Y", "HO"),
+            )
+
+    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
+        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        #
+        if self._parameters["SetDebug"]:
+            CUR_LEVEL = logging.getLogger().getEffectiveLevel()
+            logging.getLogger().setLevel(logging.DEBUG)
+            print("===> Beginning of evaluation, activating debug\n")
+            print(" %s\n"%("-"*75,))
+        #
+        # ----------
+        Ht = HO["Tangent"].asMatrix( Xb )
+        Ht = Ht.reshape(Y.size,Xb.size) # ADAO & check shape
+        # ----------
+        #
+        if self._parameters["SetDebug"]:
+            print("\n %s\n"%("-"*75,))
+            print("===> End evaluation, deactivating debug if necessary\n")
+            logging.getLogger().setLevel(CUR_LEVEL)
+        #
+        if self._toStore("CurrentState"):
+            self.StoredVariables["CurrentState"].store( Xb )
+        if self._toStore("JacobianMatrixAtCurrentState"):
+            self.StoredVariables["JacobianMatrixAtCurrentState"].store( Ht )
+        if self._toStore("SimulatedObservationAtCurrentState"):
+            if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
+                HXb = HO["AppliedInX"]["HXb"]
+            else:
+                HXb = Ht * Xb
+            HXb = numpy.asmatrix(numpy.ravel( HXb )).T
+            if Y.size != HXb.size:
+                raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
+            if max(Y.shape) != max(HXb.shape):
+                raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
+            self.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb )
+        #
+        self._post_run(HO)
+        return 0
+
+# ==============================================================================
+if __name__ == "__main__":
+    print('\n AUTODIAGNOSTIC \n')
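
As a complement, here is one way the new checking algorithm could be driven
from a Python script through the textual interface of the module. This is only
a sketch under assumptions: it supposes that the ``adaoBuilder`` entry point
and its ``set``/``execute``/``get`` methods are available in the installed
ADAO version, and the checking point, observation and matrix values are
arbitrary choices made for illustration. With a linear observation operator
given as a matrix, the stored Jacobian is simply that matrix, which makes the
result easy to verify::

    import numpy
    from adao import adaoBuilder   # assumed import path of the ADAO textual interface

    case = adaoBuilder.New()
    case.set( 'AlgorithmParameters',
        Algorithm  = 'LocalSensitivityTest',
        Parameters = {"StoreSupplementaryCalculations":[
            "CurrentState",
            "JacobianMatrixAtCurrentState",
            ]},
        )
    case.set( 'CheckingPoint',       Vector = [0., 1., 2.] )
    case.set( 'Observation',         Vector = numpy.ones(3) )  # only its size matters here
    case.set( 'ObservationOperator', Matrix = [[1., 0., 0.],
                                               [0., 2., 0.],
                                               [0., 0., 3.]] )
    case.execute()
    print( case.get('JacobianMatrixAtCurrentState')[-1] )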