Improvement of algorithms arguments validation and tests
[modules/adao.git] / src / daComposant / daAlgorithms / LinearLeastSquares.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2020 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects
import numpy
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "LINEARLEASTSQUARES")
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the computation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "CostFunctionJ",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJb",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJo",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "OMA",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.requireInputArguments(
            mandatory = ("Y", "HO", "R"),
            )
        self.setAttributes(tags=(
            "Optimization",
            "Linear",
            "Variational",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
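        # The tangent (Hm) and adjoint (Ha) forms of the observation operator
        # are taken around Xb and reshaped against the observation size; for a
        # linear operator, Ha is simply the transpose of Hm.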
        Hm = HO["Tangent"].asMatrix(Xb)
        Hm = Hm.reshape(Y.size,-1) # ADAO & check shape
        Ha = HO["Adjoint"].asMatrix(Xb)
        Ha = Ha.reshape(-1,Y.size) # ADAO & check shape
        #
        RI = R.getI()
        #
        # Computation of the gain matrix and of the analysis
        # --------------------------------------------------
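        # K = (Ha * RI * Hm)^(-1) * Ha * RI is the R^(-1)-weighted pseudo-inverse
        # of Hm, so Xa = K * Y solves min_x (Y - Hm x)^T R^(-1) (Y - Hm x).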
        K = (Ha * RI * Hm).I * Ha * RI
        Xa = K * Y
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        # Computation of the cost function
        # --------------------------------
        if self._parameters["StoreInternalVariables"] or \
            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
            self._toStore("OMA") or \
            self._toStore("SimulatedObservationAtCurrentOptimum") or \
            self._toStore("SimulatedObservationAtCurrentState") or \
            self._toStore("SimulatedObservationAtOptimum"):
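            # HXa is the analysis image in observation space, and oma the
            # observation-minus-analysis residual entering the cost function.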
            HXa = Hm * Xa
            oma = Y - HXa
        if self._parameters["StoreInternalVariables"] or \
            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"):
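            # There is no background term in linear least squares, so Jb is
            # identically zero and J reduces to the observation misfit Jo.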
            Jb  = 0.
            Jo  = float( 0.5 * oma.T * RI * oma )
            J   = Jb + Jo
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
            self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
        if self._toStore("CurrentOptimum"):
            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(oma) )
        if self._toStore("SimulatedObservationAtCurrentState"):
            self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtCurrentOptimum"):
            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run(HO)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
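    # Minimal NumPy-only sketch (not the ADAO test harness): it exercises the
    # weighted least-squares formula implemented above,
    # Xa = (Hm^T R^(-1) Hm)^(-1) Hm^T R^(-1) Y, on a small synthetic problem.
    # The operator H, covariance R and true state below are illustrative
    # assumptions, not values coming from the module itself.
    H = numpy.matrix([[1., 0.], [0., 1.], [1., 1.]])  # 3 observations of 2 states
    R = numpy.matrix(numpy.eye(3))                    # unit observation covariance
    Xtrue = numpy.matrix([[2.], [-1.]])               # reference state
    Y = H * Xtrue                                     # noise-free observations
    RI = R.getI()
    K = (H.T * RI * H).I * H.T * RI                   # same gain expression as in run()
    Xa = K * Y
    print("Weighted least-squares check, max error:", numpy.max(numpy.abs(Xa - Xtrue)))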