1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2019 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22 "Verification d'un exemple de la documentation"
24 from utExtend import assertAlmostEqualArrays
26 # ==============================================================================
28 # Construction artificielle d'un exemple de donnees utilisateur
29 # -------------------------------------------------------------
# Admissible bounds for the three control variables (alpha, beta, gamma)
# of this artificially-built user-data example.
alphamin = 0.
alphamax = 10.
betamin = 3
betamax = 13
gammamin = 1.5
gammamax = 15.5
39 "Fonction de simulation H pour effectuer Y=H(X)"
41 __x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
42 __H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
def multisimulation( xserie ):
    # NOTE(review): the result-list initialization, the loop header and the
    # "return" of this function were missing from this chunk; reconstructed
    # per the ADAO multi-evaluation convention (InputFunctionAsMulti=True):
    # evaluate H on each state of the series and return the list of results.
    "Apply the simulation operator H to every state of the input series."
    yserie = []
    for x in xserie:
        yserie.append( simulation( x ) )
    return yserie
# Observations obtained by simulating the reference ("true") state (2, 3, 4);
# the assimilation below is expected to recover this state.
# ------------------------------------
observations = simulation((2, 3, 4))
55 # ==============================================================================
59 from adao import adaoBuilder
61 # Mise en forme des entrees
62 # -------------------------
63 Xb = (alpha, beta, gamma)
71 case = adaoBuilder.New()
73 'AlgorithmParameters',
77 "MaximumNumberOfSteps":100,
78 "StoreSupplementaryCalculations":[
81 "SimulatedObservationAtOptimum",
85 case.set( 'Background', Vector = numpy.array(Xb), Stored = True )
86 case.set( 'Observation', Vector = numpy.array(observations) )
87 case.set( 'BackgroundError', ScalarSparseMatrix = 1.0e10 )
88 case.set( 'ObservationError', ScalarSparseMatrix = 1.0 )
90 'ObservationOperator',
91 OneFunction = multisimulation,
92 Parameters = {"DifferentialIncrement":0.0001},
93 InputFunctionAsMulti = True,
95 case.set( 'Observer', Variable="CurrentState", Template="ValuePrinter" )
98 # Exploitation independante
99 # -------------------------
100 Xbackground = case.get("Background")
101 Xoptimum = case.get("Analysis")[-1]
102 FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
103 J_values = case.get("CostFunctionJ")[:]
105 print("Number of internal iterations...: %i"%len(J_values))
106 print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
107 print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
108 print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
111 return case.get("Analysis")[-1]
113 # ==============================================================================
114 if __name__ == "__main__":
115 print('\nAUTODIAGNOSTIC\n')
116 print("""Exemple de la doc :
118 Exploitation independante des resultats d'un cas de calcul
119 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
122 ecart = assertAlmostEqualArrays(xa, [ 2., 3., 4.])
124 print(" L'écart absolu maximal obtenu lors du test est de %.2e."%ecart)
125 print(" Les résultats obtenus sont corrects.")