X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FdaComposant%2FdaAlgorithms%2FLinearLeastSquares.py;h=d63bbdec41121c07532800b54b0dd7b4bc36538f;hb=59c9144c3b491494cb2beeb10093f232a5e13f2a;hp=3d59e9e4734dc3eda4b0836688adfc1a95aad0fc;hpb=c73864264aff5fcc69c24cdcbc84af0d3e0179a6;p=modules%2Fadao.git

diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py
index 3d59e9e..d63bbde 100644
--- a/src/daComposant/daAlgorithms/LinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py
@@ -1,89 +1,131 @@
-#-*-coding:iso-8859-1-*-
+# -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2013 EDF R&D
+# Copyright (C) 2008-2020 EDF R&D
 #
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License.
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
 #
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
 #
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 #
-# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
 #
-# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
 import logging
-from daCore import BasicObjects, PlatformInfo
-m = PlatformInfo.SystemUsage()
-
+from daCore import BasicObjects
 import numpy
 
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
         BasicObjects.Algorithm.__init__(self, "LINEARLEASTSQUARES")
+        self.defineRequiredParameter(
+            name = "StoreInternalVariables",
+            default = False,
+            typecast = bool,
+            message = "Storage of internal or intermediate variables of the computation",
+            )
         self.defineRequiredParameter(
             name = "StoreSupplementaryCalculations",
             default = [],
             typecast = tuple,
-            message = "List of supplementary calculations to store and/or perform",
-            listval = ["OMA"]
+            message = "List of supplementary calculations to store and/or perform",
+            listval = [
+                "Analysis",
+                "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
+                "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
+                "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
+                "CurrentState",
+                "OMA",
+                "SimulatedObservationAtCurrentOptimum",
+                "SimulatedObservationAtCurrentState",
+                "SimulatedObservationAtOptimum",
+                ]
+            )
+        self.requireInputArguments(
+            mandatory = ("Y", "HO", "R"),
             )
+        self.setAttributes(tags=(
+            "Optimization",
+            "Linear",
+            "Variational",
+            ))
 
-    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
-        logging.debug("%s Starting"%self._name)
-        logging.debug("%s Memory used: %.1f MB"%(self._name, m.getUsedMemory("M")))
+    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
-        # Control parameters
-        # ------------------
-        self.setParameters(Parameters)
+        Hm = HO["Tangent"].asMatrix(Xb)
+        Hm = Hm.reshape(Y.size,-1) # ADAO & check shape
+        Ha = HO["Adjoint"].asMatrix(Xb)
+        Ha = Ha.reshape(-1,Y.size) # ADAO & check shape
         #
-        # Observation operator
-        # --------------------
-        Hm = H["Tangent"].asMatrix(None)
-        Ha = H["Adjoint"].asMatrix(None)
-        #
-        if R is not None:
-            RI = R.I
-        elif self._parameters["R_scalar"] is not None:
-            RI = 1.0 / self._parameters["R_scalar"]
-        else:
-            raise ValueError("Observation error covariance matrix has to be properly defined!")
+        RI = R.getI()
         #
         # Computation of the gain matrix and of the analysis
         # --------------------------------------------
-        K = (Ha * RI * Hm ).I * Ha * RI
+        K = (Ha * RI * Hm).I * Ha * RI
         Xa = K * Y
+        self.StoredVariables["Analysis"].store( Xa.A1 )
         #
-        # Computation of the cost function
+        # Computation of the cost function
         # --------------------------
-        oma = Y - Hm * Xa
-        Jb = 0.
-        Jo = 0.5 * oma.T * RI * oma
-        J = float( Jb ) + float( Jo )
-        self.StoredVariables["Analysis"].store( Xa.A1 )
-        self.StoredVariables["CostFunctionJb"].store( Jb )
-        self.StoredVariables["CostFunctionJo"].store( Jo )
-        self.StoredVariables["CostFunctionJ" ].store( J )
+        if self._parameters["StoreInternalVariables"] or \
+            self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
+            self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
+            self._toStore("SimulatedObservationAtCurrentState") or \
+            self._toStore("SimulatedObservationAtOptimum"):
+            HXa = Hm * Xa
+            oma = Y - HXa
+        if self._parameters["StoreInternalVariables"] or \
+            self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"):
+            Jb = 0.
+            Jo = float( 0.5 * oma.T * RI * oma )
+            J = Jb + Jo
+            self.StoredVariables["CostFunctionJb"].store( Jb )
+            self.StoredVariables["CostFunctionJo"].store( Jo )
+            self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
-        # Supplementary calculations and/or storage
+        # Supplementary calculations and/or storage
         # ---------------------------------------
-        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
+            self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
+        if self._toStore("OMA"):
             self.StoredVariables["OMA"].store( numpy.ravel(oma) )
+        if Xb is not None and self._toStore("SimulatedObservationAtBackground"):
+            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm * Xb) )
+        if self._toStore("SimulatedObservationAtCurrentState"):
+            self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtOptimum"):
+            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
-        logging.debug("%s Memory used: %.1f MB"%(self._name, m.getUsedMemory("M")))
-        logging.debug("%s Finished"%self._name)
-        #
+        self._post_run(HO)
         return 0
 
 # ==============================================================================
 if __name__ == "__main__":
-    print '\n AUTODIAGNOSTIC \n'
+    print('\n AUTODIAGNOSTIC\n')
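
The analysis computed above is the classical weighted linear least-squares estimator: with H the tangent observation operator (Hm), H^T its adjoint (Ha), R the observation error covariance and Y the observations, the gain is K = (H^T R^-1 H)^-1 H^T R^-1, the analysis is Xa = K Y, and the cost reduces to J = Jo = 0.5 (Y - H Xa)^T R^-1 (Y - H Xa), since Jb = 0 (there is no background term in this algorithm). The following minimal NumPy sketch reproduces that estimator outside of ADAO; the matrices H and R and the vector Y below are illustrative stand-ins, not objects of the ADAO API.

    import numpy as np

    # Illustrative stand-ins: a 3x2 linear observation operator, a diagonal
    # observation error covariance, and an observation vector.
    H = np.array([[1., 0.],
                  [0., 1.],
                  [1., 1.]])
    R = np.diag([1., 1., 4.])
    Y = np.array([1., 2., 3.5])

    RI = np.linalg.inv(R)
    # Gain K = (H^T R^-1 H)^-1 H^T R^-1, via solve() on the normal matrix
    # rather than an explicit inverse.
    K  = np.linalg.solve(H.T @ RI @ H, H.T @ RI)
    Xa = K @ Y                          # analysis: the optimal state estimate

    oma = Y - H @ Xa                    # observation minus analysis ("OMA")
    Jo  = 0.5 * float(oma @ RI @ oma)   # observation part of the cost function
    J   = 0. + Jo                       # Jb = 0, so J = Jo

Solving the normal equations with numpy.linalg.solve avoids forming (H^T R^-1 H)^-1 explicitly; for well-conditioned problems it gives the same result as the matrix inversion (.I) used in the ADAO code above.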