1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2018 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
# Module docstring (French): "Verification of an example from the documentation".
"Verification d'un exemple de la documentation"
# Project-local test helper: array comparison with tolerance.
from utExtend import assertAlmostEqualArrays
# ==============================================================================
# Artificial construction of an example of user data
# -------------------------------------------------------------
# Admissible bounds for the three control variables (alpha, beta, gamma).
# NOTE(review): the lines defining alpha, beta and gamma themselves are not
# visible in this excerpt; these bounds are not used in the visible code.
alphamin, alphamax = 0., 10.
betamin, betamax = 3, 13
gammamin, gammamax = 1.5, 15.5
# Fragment of the simulation operator H (its "def" line is not visible in this
# excerpt): computes Y = H(X) for a 3-component state X.
"Fonction de simulation H pour effectuer Y=H(X)"
# Flatten any array-like input x into a column vector (3x1).
__x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
# Linear observation operator H (4 rows x 3 columns): maps the 3 state
# components onto 4 observed values.
# NOTE(review): the "return __H * __x" statement is presumably among the
# missing lines -- confirm against the full file.
__H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
# Observations obtained by simulation
# ------------------------------------
# Twin-experiment setup: synthetic observations generated from the "true"
# state (2, 3, 4), which the assimilation below is expected to recover.
observations = simulation((2, 3, 4))
49 # ==============================================================================
53 from adao import adaoBuilder
55 # Mise en forme des entrees
56 # -------------------------
57 Xb = (alpha, beta, gamma)
65 case = adaoBuilder.New()
67 'AlgorithmParameters',
71 "MaximumNumberOfSteps":100,
72 "StoreSupplementaryCalculations":[
75 "SimulatedObservationAtOptimum",
79 case.set( 'Background', Vector = numpy.array(Xb), Stored = True )
80 case.set( 'Observation', Vector = numpy.array(observations) )
81 case.set( 'BackgroundError', ScalarSparseMatrix = 1.0e10 )
82 case.set( 'ObservationError', ScalarSparseMatrix = 1.0 )
84 'ObservationOperator',
85 OneFunction = simulation,
86 Parameters = {"DifferentialIncrement":0.0001},
88 case.set( 'Observer', Variable="CurrentState", Template="ValuePrinter" )
91 # Exploitation independante
92 # -------------------------
93 Xbackground = case.get("Background")
94 Xoptimum = case.get("Analysis")[-1]
95 FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
96 J_values = case.get("CostFunctionJ")[:]
98 print("Number of internal iterations...: %i"%len(J_values))
99 print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
100 print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
101 print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
104 return case.get("Analysis")[-1]
106 # ==============================================================================
107 if __name__ == "__main__":
108 print('\nAUTODIAGNOSTIC\n')
109 print("""Exemple de la doc :
111 Exploitation independante des resultats d'un cas de calcul
112 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
115 assertAlmostEqualArrays(xa, [ 2., 3., 4.])