X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FdaComposant%2FdaAlgorithms%2FLinearLeastSquares.py;h=e8af931a9ea25552451e41d0faca6ef3534af557;hb=087028cb881b07298c2b68bf8b2c080dff09a042;hp=855d8a1e73274e367c30a31d45d6b85654452190;hpb=5b0506dcc6d9336fc869443fa70975d27c8f71c1;p=modules%2Fadao.git
diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py
index 855d8a1..e8af931 100644
--- a/src/daComposant/daAlgorithms/LinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py
@@ -1,62 +1,126 @@
-#-*-coding:iso-8859-1-*-
+# -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2009 EDF R&D
+# Copyright (C) 2008-2020 EDF R&D
 #
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License.
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
 #
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
 #
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
-# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
 #
-__doc__ = """
-    Algorithme de moindre carres pondérés (analyse sans ebauche)
-"""
-__author__ = "Sophie RICCI, Jean-Philippe ARGAUD - Septembre 2008"
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

-import sys ; sys.path.insert(0, "../daCore")
 import logging
-import Persistence
-from BasicObjects import Algorithm
-import PlatformInfo ; m = PlatformInfo.SystemUsage()
+from daCore import BasicObjects
+import numpy

 # ==============================================================================
-class ElementaryAlgorithm(Algorithm):
+class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
-        Algorithm.__init__(self)
-        self._name = "LINEARLEASTSQUARES"
+        BasicObjects.Algorithm.__init__(self, "LINEARLEASTSQUARES")
+        self.defineRequiredParameter(
+            name = "StoreInternalVariables",
+            default = False,
+            typecast = bool,
+            message = "Stockage des variables internes ou intermédiaires du calcul",
+            )
+        self.defineRequiredParameter(
+            name = "StoreSupplementaryCalculations",
+            default = [],
+            typecast = tuple,
+            message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
+            listval = [
+                "Analysis",
+                "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
+                "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
"CostFunctionJo", + "CostFunctionJoAtCurrentOptimum", + "CurrentOptimum", + "CurrentState", + "OMA", + "SimulatedObservationAtCurrentOptimum", + "SimulatedObservationAtCurrentState", + "SimulatedObservationAtOptimum", + ] + ) + self.requireInputArguments( + mandatory= ("Y", "HO", "R"), + ) - def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Par=None): - """ - Calcul de l'estimateur au sens des moindres carres sans ebauche - """ - logging.debug("%s Lancement"%self._name) - logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo"))) + def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): + self._pre_run(Parameters, Xb, Y, R, B, Q) # - Hm = H["Direct"].asMatrix() - Ht = H["Adjoint"].asMatrix() + Hm = HO["Tangent"].asMatrix(None) + Hm = Hm.reshape(Y.size,-1) # ADAO & check shape + Ha = HO["Adjoint"].asMatrix(None) + Ha = Ha.reshape(-1,Y.size) # ADAO & check shape # - K = (Ht * R.I * Hm ).I * Ht * R.I - Xa = K * Y + RI = R.getI() # + # Calcul de la matrice de gain et de l'analyse + # -------------------------------------------- + K = (Ha * RI * Hm).I * Ha * RI + Xa = K * Y self.StoredVariables["Analysis"].store( Xa.A1 ) # - logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo"))) - logging.debug("%s Terminé"%self._name) + # Calcul de la fonction coût + # -------------------------- + if self._parameters["StoreInternalVariables"] or \ + self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtOptimum"): + HXa = Hm * Xa + oma = Y - HXa + if self._parameters["StoreInternalVariables"] or \ + self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"): + Jb = 0. 
+ Jo = float( 0.5 * oma.T * RI * oma ) + J = Jb + Jo + self.StoredVariables["CostFunctionJb"].store( Jb ) + self.StoredVariables["CostFunctionJo"].store( Jo ) + self.StoredVariables["CostFunctionJ" ].store( J ) + self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb ) + self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo ) + self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J ) + # + # Calculs et/ou stockages supplémentaires + # --------------------------------------- + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): + self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) + if self._toStore("CurrentOptimum"): + self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) ) + if self._toStore("OMA"): + self.StoredVariables["OMA"].store( numpy.ravel(oma) ) + if self._toStore("SimulatedObservationAtBackground"): + self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) + if self._toStore("SimulatedObservationAtCurrentState"): + self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) + if self._toStore("SimulatedObservationAtCurrentOptimum"): + self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) ) + if self._toStore("SimulatedObservationAtOptimum"): + self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # + self._post_run(HO) return 0 # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' - - + print('\n AUTODIAGNOSTIC\n')
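For readers of the diff, the "+" lines in run() implement the classical weighted linear least-squares estimator without background: the gain K = (H^T R^-1 H)^-1 H^T R^-1, the analysis Xa = K Y, and the cost function J = Jo = 0.5 (Y - H Xa)^T R^-1 (Y - H Xa) with Jb = 0. The short sketch below restates those formulas with plain NumPy, outside the ADAO BasicObjects and StoredVariables machinery; the operator H, the covariance R and the observations Y are invented toy values used only for illustration and are not part of ADAO or of this commit.

# Standalone illustration of the estimator implemented by the "+" lines above.
# H, R and Y are invented toy values (assumptions for the example only).
import numpy

H = numpy.array([[1., 0.], [0., 1.], [1., 1.]])  # observation operator (3 obs, 2 unknowns)
R = numpy.diag([0.1, 0.1, 0.2])                  # observation error covariance
Y = numpy.array([1.1, 1.9, 3.2])                 # observations

RI = numpy.linalg.inv(R)                         # R^-1, the role played by R.getI() in the diff
K  = numpy.linalg.inv(H.T @ RI @ H) @ H.T @ RI   # gain, as (Ha * RI * Hm).I * Ha * RI
Xa = K @ Y                                       # analysis, with no background term

oma = Y - H @ Xa                                 # observation minus analysis residual
Jb  = 0.                                         # no background, hence no background part
Jo  = float(0.5 * (oma @ RI @ oma))              # observation part of the cost function
J   = Jb + Jo

print("Analysis Xa:", Xa)
print("Cost J = Jb + Jo =", J)

The sketch uses numpy.linalg.inv and 1-D arrays for clarity; the algorithm in the diff works with numpy.matrix objects (hence .I, .A1) and the covariance object's getI() method, but the underlying formula is the same.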