# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2019 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects
import numpy

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "LINEARLEASTSQUARES")
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the calculation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "CostFunctionJ",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJb",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJo",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "OMA",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.requireInputArguments(
            mandatory = ("Y", "HO", "R"),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        Hm = HO["Tangent"].asMatrix(None)
        Hm = Hm.reshape(Y.size,-1) # ADAO & check shape
        Ha = HO["Adjoint"].asMatrix(None)
        Ha = Ha.reshape(-1,Y.size) # ADAO & check shape
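        # For a linear operator the tangent Hm is the operator itself and the
        # adjoint Ha is its transpose; the reshapes above enforce the expected
        # (Y.size, n) and (n, Y.size) shapes.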
        #
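        # RI, the inverse of the observation error covariance R, weights the
        # observations in both the gain matrix and the cost function below.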
        RI = R.getI()
        #
        # Computation of the gain matrix and of the analysis
        # ---------------------------------------------------
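        # K is the pseudo-inverse gain of the weighted normal equations, so
        # that Xa = (Ha * RI * Hm)^(-1) * Ha * RI * Y is the weighted linear
        # least squares estimator of the state given the observations Y.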
        K = (Ha * RI * Hm).I * Ha * RI
        Xa = K * Y
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        # Computation of the cost function
        # --------------------------------
        if self._parameters["StoreInternalVariables"] or \
            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
            self._toStore("OMA") or \
            self._toStore("SimulatedObservationAtCurrentOptimum") or \
            self._toStore("SimulatedObservationAtCurrentState") or \
            self._toStore("SimulatedObservationAtOptimum"):
            HXa = Hm * Xa
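            # Observation-minus-analysis misfit, reused for OMA and Jo below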
            oma = Y - HXa
        if self._parameters["StoreInternalVariables"] or \
            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"):
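            # Linear least squares has no background term, so Jb = 0 and the
            # cost function reduces to the observation misfit Jo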
            Jb  = 0.
            Jo  = float( 0.5 * oma.T * RI * oma )
            J   = Jb + Jo
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
        #
        # Supplementary calculations and/or storage
        # ------------------------------------------
        if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
            self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
        if self._toStore("CurrentOptimum"):
            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(oma) )
        if self._toStore("SimulatedObservationAtCurrentState"):
            self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtCurrentOptimum"):
            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run(HO)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
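    # Illustrative sketch (assumption: hypothetical data, not part of the
    # ADAO test suite): the analysis computed by run() above is the classical
    # weighted linear least squares estimator, reproduced here with plain
    # NumPy so the formula can be checked standalone.
    H  = numpy.matrix([[1., 0.], [0., 1.], [1., 1.]])  # hypothetical linear observation operator
    R  = numpy.matrix(numpy.eye(3))                    # hypothetical observation error covariance
    Y  = numpy.matrix([[1.], [2.], [3.1]])             # hypothetical observations
    RI = R.I
    K  = (H.T * RI * H).I * H.T * RI                   # same gain formula as in run()
    Xa = K * Y
    print('Example analysis Xa =', Xa.A1)
    print('Example misfit  OMA =', (Y - H * Xa).A1)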