1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2019 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification of an example from the documentation"
# ==============================================================================
# Artificial construction of an example of user data
# -------------------------------------------------------------
# NOTE(review): several original lines are missing from this excerpt (the
# alpha/beta/gamma assignments, the "def simulation(x):" header, the opening
# "case.set(" lines, the enclosing function's "def", ...). Only comments were
# added here; the remaining code is left untouched.
# Admissible bounds of the three parameters to estimate:
alphamin, alphamax = 0., 10.
betamin, betamax = 3, 13
gammamin, gammamax = 1.5, 15.5
# Docstring of the simulation operator (its "def" header is outside this view):
"Simulation function H computing Y=H(X)"
# Force the input into a column vector, whatever its original layout:
__x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
# Fixed 4x3 linear observation operator:
__H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
# Observations obtained by simulation
# ------------------------------------
# Twin experiment: observations are the simulation of the "true" state (2,3,4):
observations = simulation((2, 3, 4))
# ==============================================================================
from adao import adaoBuilder
# Formatting of the inputs
# -------------------------
# Background (first guess) built from the a priori values defined above:
Xb = (alpha, beta, gamma)
case = adaoBuilder.New()
# NOTE(review): continuation of a "case.set(" call whose opening line is
# outside this excerpt:
'AlgorithmParameters',
"MaximumNumberOfSteps":100,
"StoreSupplementaryCalculations":[
"SimulatedObservationAtOptimum",
case.set( 'Background', Vector = numpy.array(Xb), Stored = True )
case.set( 'Observation', Vector = numpy.array(observations) )
case.set( 'BackgroundError', ScalarSparseMatrix = 1.0e10 )
case.set( 'ObservationError', ScalarSparseMatrix = 1.0 )
# NOTE(review): continuation of the 'ObservationOperator' "case.set(" call:
'ObservationOperator',
OneFunction = simulation,
# Increment used for the finite-difference approximation of the gradient:
Parameters = {"DifferentialIncrement":0.0001},
case.set( 'Observer', Variable="CurrentState", Template="ValuePrinter" )
# Independent exploitation of the results
# ---------------------------------------
# Retrieve the stored quantities from the ADAO case after execution:
Xbackground = case.get("Background")
Xoptimum = case.get("Analysis")[-1]
FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
J_values = case.get("CostFunctionJ")[:]
print("Number of internal iterations...: %i"%len(J_values))
print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
# Return the optimal state (this "return" belongs to an enclosing function
# whose "def" header is outside this excerpt):
return case.get("Analysis")[-1]
104 # ==============================================================================
def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
    "Compare two vectors, like unittest.assertAlmostEqual"
    # first, second : array-like vectors compared element-wise
    # places        : tolerance 10**-places, used when no delta is given
    # msg           : optional message printed before comparing
    # delta         : optional absolute tolerance overriding "places"
    # Returns the maximal absolute discrepancy; raises AssertionError when any
    # component differs by more than the tolerance.
    import numpy
    if msg is not None:
        print(msg)
    # Fix: compare ABSOLUTE differences, as unittest.assertAlmostEqual does;
    # the previous signed comparison silently passed whenever first < second.
    ecart = numpy.abs(numpy.asarray(first) - numpy.asarray(second))
    if delta is not None:
        if ( ecart > float(delta) ).any():
            raise AssertionError("%s != %s within %s places"%(first,second,delta))
    else:
        # "places" path only when no explicit delta was requested
        if ( ecart > 10**(-int(places)) ).any():
            raise AssertionError("%s != %s within %i places"%(first,second,places))
    return numpy.max(ecart)
118 # ==============================================================================
# Entry point: run the documentation example and check its optimal result.
if __name__ == "__main__":
print('\nAUTODIAGNOSTIC\n')
# NOTE(review): the closing of the triple-quoted banner below, and the call
# that produces "xa", are outside this excerpt; the code is left untouched.
print("""Exemple de la doc :
Exploitation independante des resultats d'un cas de calcul
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
ecart = assertAlmostEqualArrays(xa, [ 2., 3., 4.])
print(" L'écart absolu maximal obtenu lors du test est de %.2e."%ecart)
print(" Les résultats obtenus sont corrects.")