]> SALOME platform Git repositories - modules/adao.git/commitdiff
Salome HOME
Adding examples for *FunctionTest (EN)
authorJean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Sun, 19 Jun 2022 18:37:41 +0000 (20:37 +0200)
committerJean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Sun, 19 Jun 2022 18:43:04 +0000 (20:43 +0200)
doc/en/ref_algorithm_FunctionTest.rst
doc/en/ref_algorithm_ParallelFunctionTest.rst
doc/en/scripts/simple_FunctionTest.py [new file with mode: 0644]
doc/en/scripts/simple_FunctionTest.res [new file with mode: 0644]
doc/en/scripts/simple_FunctionTest.rst [new file with mode: 0644]
doc/en/scripts/simple_ParallelFunctionTest.py [new file with mode: 0644]
doc/en/scripts/simple_ParallelFunctionTest.res [new file with mode: 0644]
doc/en/scripts/simple_ParallelFunctionTest.rst [new file with mode: 0644]
src/daComposant/daAlgorithms/FunctionTest.py
src/daComposant/daAlgorithms/ParallelFunctionTest.py

index b7e4ad4fdd6b20708573d9c8668a34c5451da3a4..ca4866ad756d9dd70721d905515776308bdb80bc 100644 (file)
@@ -30,10 +30,11 @@ Checking algorithm "*FunctionTest*"
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo01.rst
 
-This algorithm allows to verify that the observation operator is working
-correctly and that its call is compatible with its usage in ADAO algorithms. In
-practice, it allows to call one or several times the operator, activating or not
-the "debug" mode during execution.
+This algorithm allows to verify that an operator, in particular the
+observation one, is working correctly and that its call is compatible
+with its usage in ADAO algorithms. In practice, it allows to call one
+or several times the operator, activating or not the "debug" mode
+during execution.
 
 Statistics on input and output vectors for each execution of operator are
 given, and another global statistic is given at the end of the checking
@@ -90,6 +91,17 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
 
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo09.rst
+
+.. include:: scripts/simple_FunctionTest.rst
+
+.. literalinclude:: scripts/simple_FunctionTest.py
+
+.. include:: snippets/Header2Algo10.rst
+
+.. literalinclude:: scripts/simple_FunctionTest.res
+
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo06.rst
 
index c2540de1339b4e0ebd3b60032dc3bdf1a251e279..df13223c0017bfd5a2c24c0ae0b35e878d3619f4 100644 (file)
@@ -30,10 +30,11 @@ Checking algorithm "*ParallelFunctionTest*"
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo01.rst
 
-This algorithm allows to verify that the observation operator is working
-correctly in parallel and that its call is compatible with its usage in ADAO
-algorithms. In practice, it allows to call one or several times the operator in
-parallel, activating or not the "debug" mode during execution.
+This algorithm allows to verify that an operator, in particular the
+observation one, is working correctly in parallel and that its call is
+compatible with its usage in ADAO algorithms. In practice, it allows to
+call one or several times the operator in parallel, activating or not
+the "debug" mode during execution.
 
 Statistics on input and output vectors for each execution of operator are
 given, and another global statistic is given at the end of the checking
@@ -90,6 +91,17 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
 
+.. ------------------------------------ ..
+.. include:: snippets/Header2Algo09.rst
+
+.. include:: scripts/simple_ParallelFunctionTest.rst
+
+.. literalinclude:: scripts/simple_ParallelFunctionTest.py
+
+.. include:: snippets/Header2Algo10.rst
+
+.. literalinclude:: scripts/simple_ParallelFunctionTest.res
+
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo06.rst
 
diff --git a/doc/en/scripts/simple_FunctionTest.py b/doc/en/scripts/simple_FunctionTest.py
new file mode 100644 (file)
index 0000000..638d6dd
--- /dev/null
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+#
+from numpy import array, eye
+from adao import adaoBuilder
+case = adaoBuilder.New('')
+case.setCheckingPoint( Vector = array([0., 1., 2.]), Stored=True )
+case.setObservationOperator( Matrix = eye(3), )
+case.setAlgorithmParameters(
+    Algorithm='FunctionTest',
+    Parameters={
+        'NumberOfRepetition' : 5,
+        'NumberOfPrintedDigits' : 2,
+        "ShowElementarySummary":False,
+        },
+    )
+case.execute()
diff --git a/doc/en/scripts/simple_FunctionTest.res b/doc/en/scripts/simple_FunctionTest.res
new file mode 100644 (file)
index 0000000..8f57585
--- /dev/null
@@ -0,0 +1,52 @@
+===> Information before launching:
+     -----------------------------
+     Characteristics of input vector X, internally converted:
+       Type...............: <class 'numpy.ndarray'>
+       Lenght of vector...: 3
+       Minimum value......: 0.00e+00
+       Maximum value......: 2.00e+00
+       Mean of vector.....: 1.00e+00
+       Standard error.....: 8.16e-01
+       L2 norm of vector..: 2.24e+00
+
+     ---------------------------------------------------------------------------
+
+===> Beginning of repeated evaluation, without activating debug
+
+     ---------------------------------------------------------------------------
+
+===> End of repeated evaluation, without deactivating debug
+
+     ---------------------------------------------------------------------------
+
+===> Launching statistical summary calculation for 5 states
+
+     ---------------------------------------------------------------------------
+
+===> Statistical analysis of the outputs obtained through sequential repeated evaluations
+
+     (Remark: numbers that are (about) under 2e-16 represent 0 to machine precision)
+
+     Characteristics of the whole set of outputs Y:
+       Number of evaluations.........................: 5
+       Minimum value of the whole set of outputs.....: 0.00e+00
+       Maximum value of the whole set of outputs.....: 2.00e+00
+       Mean of vector of the whole set of outputs....: 1.00e+00
+       Standard error of the whole set of outputs....: 8.16e-01
+
+     Characteristics of the vector Ym, mean of the outputs Y:
+       Size of the mean of the outputs...............: 3
+       Minimum value of the mean of the outputs......: 0.00e+00
+       Maximum value of the mean of the outputs......: 2.00e+00
+       Mean of the mean of the outputs...............: 1.00e+00
+       Standard error of the mean of the outputs.....: 8.16e-01
+
+     Characteristics of the mean of the differences between the outputs Y and their mean Ym:
+       Size of the mean of the differences...........: 3
+       Minimum value of the mean of the differences..: 0.00e+00
+       Maximum value of the mean of the differences..: 0.00e+00
+       Mean of the mean of the differences...........: 0.00e+00
+       Standard error of the mean of the differences.: 0.00e+00
+
+     ---------------------------------------------------------------------------
+
diff --git a/doc/en/scripts/simple_FunctionTest.rst b/doc/en/scripts/simple_FunctionTest.rst
new file mode 100644 (file)
index 0000000..74a376b
--- /dev/null
@@ -0,0 +1,16 @@
+.. index:: single: FunctionTest (example)
+
+This example describes the test of the correct operation of an operator,
+and checks that its call is compatible with its use in the ADAO algorithms.
+The required information is minimal, namely here an observation operator
+:math:`H` and a state :math:`\mathbf{x}^b` on which to test it (named
+"*CheckingPoint*" for the test).
+
+The test is repeated a customizable number of times, and a final statistic
+makes it possible to quickly check the good behavior of the operator. The
+simplest diagnostic consists in checking, at the end, the order of magnitude
+of the values indicated as the mean of the differences between the repeated
+outputs and their mean ("*mean of the differences between the outputs Y and
+their mean Ym*"). For a normal operator, these values should be close to the
+numerical zero.
+
diff --git a/doc/en/scripts/simple_ParallelFunctionTest.py b/doc/en/scripts/simple_ParallelFunctionTest.py
new file mode 100644 (file)
index 0000000..dde6bc1
--- /dev/null
@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+#
+import numpy
+from adao import adaoBuilder
+#
+def SomeOperator( x ):
+    return numpy.dot(numpy.eye(x.size), numpy.ravel(x))
+#
+case = adaoBuilder.New('')
+case.setAlgorithmParameters(
+    Algorithm='ParallelFunctionTest',
+    Parameters={
+        'NumberOfRepetition' : 50,
+        'NumberOfPrintedDigits' : 2,
+        "ShowElementarySummary":False,
+        },
+    )
+case.setCheckingPoint( Vector = range(30) )
+case.setObservationOperator(
+    OneFunction = SomeOperator,
+    Parameters  = {
+        "EnableMultiProcessingInEvaluation":True,
+        "NumberOfProcesses":5,
+        },
+    )
+case.execute()
diff --git a/doc/en/scripts/simple_ParallelFunctionTest.res b/doc/en/scripts/simple_ParallelFunctionTest.res
new file mode 100644 (file)
index 0000000..829da09
--- /dev/null
@@ -0,0 +1,59 @@
+===> Information before launching:
+     -----------------------------
+     Characteristics of input vector X, internally converted:
+       Type...............: <class 'numpy.ndarray'>
+       Lenght of vector...: 30
+       Minimum value......: 0.00e+00
+       Maximum value......: 2.90e+01
+       Mean of vector.....: 1.45e+01
+       Standard error.....: 8.66e+00
+       L2 norm of vector..: 9.25e+01
+
+     ---------------------------------------------------------------------------
+
+===> Beginning of repeated evaluation, without activating debug
+
+     ---------------------------------------------------------------------------
+
+===> Launching operator parallel evaluation for 50 states
+
+
+===> End of operator parallel evaluation for 50 states
+
+     ---------------------------------------------------------------------------
+
+===> End of repeated evaluation, without deactivating debug
+
+     ---------------------------------------------------------------------------
+
+===> Launching statistical summary calculation for 50 states
+
+     ---------------------------------------------------------------------------
+
+===> Statistical analysis of the outputs obtained through parallel repeated evaluations
+
+     (Remark: numbers that are (about) under 2e-16 represent 0 to machine precision)
+
+     Characteristics of the whole set of outputs Y:
+       Number of evaluations.........................: 50
+       Minimum value of the whole set of outputs.....: 0.00e+00
+       Maximum value of the whole set of outputs.....: 2.90e+01
+       Mean of vector of the whole set of outputs....: 1.45e+01
+       Standard error of the whole set of outputs....: 8.66e+00
+
+     Characteristics of the vector Ym, mean of the outputs Y:
+       Size of the mean of the outputs...............: 30
+       Minimum value of the mean of the outputs......: 0.00e+00
+       Maximum value of the mean of the outputs......: 2.90e+01
+       Mean of the mean of the outputs...............: 1.45e+01
+       Standard error of the mean of the outputs.....: 8.66e+00
+
+     Characteristics of the mean of the differences between the outputs Y and their mean Ym:
+       Size of the mean of the differences...........: 30
+       Minimum value of the mean of the differences..: 0.00e+00
+       Maximum value of the mean of the differences..: 0.00e+00
+       Mean of the mean of the differences...........: 0.00e+00
+       Standard error of the mean of the differences.: 0.00e+00
+
+     ---------------------------------------------------------------------------
+
diff --git a/doc/en/scripts/simple_ParallelFunctionTest.rst b/doc/en/scripts/simple_ParallelFunctionTest.rst
new file mode 100644 (file)
index 0000000..b068a99
--- /dev/null
@@ -0,0 +1,14 @@
+.. index:: single: ParallelFunctionTest (example)
+
+This example describes the test of the correct operation of an operator and
+that its call is compatible with its use in the ADAO algorithms. The necessary
+information are minimal, namely here an operator of type observation :math:`H`
+and a state :math:`\mathbf{x}^b` on which to test it (named "*CheckingPoint*"
+for the test).
+
+The test is repeated a customizable number of times, and a final statistic
+allows to quickly check the good behavior of the operator. The simplest
+diagnostic consists in checking, at the end, the order of magnitude of the
+values indicated as the mean of the differences between the repeated outputs
+Y and their mean ("*mean of the differences between the outputs Y and their
+mean Ym*"). For a typical operator, these values should be close to the
+numerical zero.
index 51b2908f16d154cfde6c7241aa758f522aa0e6c3..22bf549b0e49370c92683cc48bd5310d58922817 100644 (file)
@@ -30,6 +30,12 @@ mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
 class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
         BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST")
+        self.defineRequiredParameter(
+            name     = "ShowElementarySummary",
+            default  = True,
+            typecast = bool,
+            message  = "Calcule et affiche un résumé à chaque évaluation élémentaire",
+            )
         self.defineRequiredParameter(
             name     = "NumberOfPrintedDigits",
             default  = 5,
@@ -81,6 +87,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         Xn = copy.copy( Xb )
         #
         # ----------
+        __s = self._parameters["ShowElementarySummary"]
         __marge =  5*u" "
         _p = self._parameters["NumberOfPrintedDigits"]
         if len(self._parameters["ResultTitle"]) > 0:
@@ -103,12 +110,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         msgs += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Xn )
         print(msgs)
         #
+        print("     %s\n"%("-"*75,))
         if self._parameters["SetDebug"]:
             CUR_LEVEL = logging.getLogger().getEffectiveLevel()
             logging.getLogger().setLevel(logging.DEBUG)
-            print("===> Beginning of evaluation, activating debug\n")
+            print("===> Beginning of repeated evaluation, activating debug\n")
         else:
-            print("===> Beginning of evaluation, without activating debug\n")
+            print("===> Beginning of repeated evaluation, without activating debug\n")
         #
         # ----------
         HO["Direct"].disableAvoidingRedundancy()
@@ -117,25 +125,27 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         for i in range(self._parameters["NumberOfRepetition"]):
             if self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) )
-            print("     %s\n"%("-"*75,))
-            if self._parameters["NumberOfRepetition"] > 1:
-                print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
-            print("===> Launching direct operator evaluation\n")
+            if __s:
+                print("     %s\n"%("-"*75,))
+                if self._parameters["NumberOfRepetition"] > 1:
+                    print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
+                print("===> Launching operator sequential evaluation\n")
             #
             Yn = Hm( Xn )
             #
-            print("\n===> End of direct operator evaluation\n")
-            #
-            msgs  = ("===> Information after evaluation:\n")
-            msgs += ("\n     Characteristics of simulated output vector Y=H(X), to compare to others:\n")
-            msgs += ("       Type...............: %s\n")%type( Yn )
-            msgs += ("       Lenght of vector...: %i\n")%max(numpy.ravel( Yn ).shape)
-            msgs += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
-            msgs += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
-            msgs += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
-            msgs += ("       Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
-            msgs += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
-            print(msgs)
+            if __s:
+                print("\n===> End of operator sequential evaluation\n")
+                #
+                msgs  = ("===> Information after evaluation:\n")
+                msgs += ("\n     Characteristics of simulated output vector Y=H(X), to compare to others:\n")
+                msgs += ("       Type...............: %s\n")%type( Yn )
+                msgs += ("       Lenght of vector...: %i\n")%max(numpy.ravel( Yn ).shape)
+                msgs += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
+                msgs += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
+                msgs += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
+                msgs += ("       Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
+                msgs += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
+                print(msgs)
             if self._toStore("SimulatedObservationAtCurrentState"):
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) )
             #
@@ -148,12 +158,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         print("     %s\n"%("-"*75,))
         if self._parameters["SetDebug"]:
-            print("===> End of evaluation, deactivating debug if necessary\n")
+            print("===> End of repeated evaluation, deactivating debug if necessary\n")
             logging.getLogger().setLevel(CUR_LEVEL)
         else:
-            print("===> End of evaluation, without deactivating debug\n")
+            print("===> End of repeated evaluation, without deactivating debug\n")
         #
         if self._parameters["NumberOfRepetition"] > 1:
+            print("     %s\n"%("-"*75,))
+            print("===> Launching statistical summary calculation for %i states\n"%self._parameters["NumberOfRepetition"])
             msgs  = ("     %s\n"%("-"*75,))
             msgs += ("\n===> Statistical analysis of the outputs obtained through sequential repeated evaluations\n")
             msgs += ("\n     (Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
index 4f80e054e7d244d092641298625134c79793a096..4364bd9c6af41152e66054c515b02e67aea06153 100644 (file)
@@ -30,6 +30,12 @@ mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
 class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
         BasicObjects.Algorithm.__init__(self, "PARALLELFUNCTIONTEST")
+        self.defineRequiredParameter(
+            name     = "ShowElementarySummary",
+            default  = True,
+            typecast = bool,
+            message  = "Calcule et affiche un résumé à chaque évaluation élémentaire",
+            )
         self.defineRequiredParameter(
             name     = "NumberOfPrintedDigits",
             default  = 5,
@@ -81,6 +87,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         Xn = copy.copy( Xb )
         #
         # ----------
+        __s = self._parameters["ShowElementarySummary"]
         __marge =  5*u" "
         _p = self._parameters["NumberOfPrintedDigits"]
         if len(self._parameters["ResultTitle"]) > 0:
@@ -95,7 +102,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         msgs += ("     -----------------------------\n")
         msgs += ("     Characteristics of input vector X, internally converted:\n")
         msgs += ("       Type...............: %s\n")%type( Xn )
-        msgs += ("       Lenght of vector...: %i\n")%max(numpy.asarray( Xn ).shape)
+        msgs += ("       Lenght of vector...: %i\n")%max(numpy.ravel( Xn ).shape)
         msgs += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Xn )
         msgs += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Xn )
         msgs += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn, dtype=mfp )
@@ -107,52 +114,62 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._parameters["SetDebug"]:
             CUR_LEVEL = logging.getLogger().getEffectiveLevel()
             logging.getLogger().setLevel(logging.DEBUG)
-            print("===> Beginning of evaluation, activating debug\n")
+            print("===> Beginning of repeated evaluation, activating debug\n")
         else:
-            print("===> Beginning of evaluation, without activating debug\n")
+            print("===> Beginning of repeated evaluation, without activating debug\n")
         #
-        Xs = []
+        # ----------
+        HO["Direct"].disableAvoidingRedundancy()
+        # ----------
         Ys = []
+        print("     %s\n"%("-"*75,))
+        Xs = []
         for i in range(self._parameters["NumberOfRepetition"]):
             if self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) )
             Xs.append( Xn )
+        print("===> Launching operator parallel evaluation for %i states\n"%self._parameters["NumberOfRepetition"])
         #
-        # ----------
-        HO["Direct"].disableAvoidingRedundancy()
-        # ----------
         Ys = Hm( Xs, argsAsSerie = True )
+        #
+        print("\n===> End of operator parallel evaluation for %i states\n"%self._parameters["NumberOfRepetition"])
+        #
         # ----------
         HO["Direct"].enableAvoidingRedundancy()
         # ----------
         #
-        print()
+        print("     %s\n"%("-"*75,))
         if self._parameters["SetDebug"]:
-            print("===> End of evaluation, deactivating debug\n")
+            print("===> End of repeated evaluation, deactivating debug if necessary\n")
             logging.getLogger().setLevel(CUR_LEVEL)
         else:
-            print("===> End of evaluation, without deactivating debug\n")
+            print("===> End of repeated evaluation, without deactivating debug\n")
         #
-        for i in range(self._parameters["NumberOfRepetition"]):
-            print("     %s\n"%("-"*75,))
-            if self._parameters["NumberOfRepetition"] > 1:
-                print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
-            #
-            Yn = Ys[i]
-            msgs  = ("===> Information after evaluation:\n")
-            msgs += ("\n     Characteristics of simulated output vector Y=H(X), to compare to others:\n")
-            msgs += ("       Type...............: %s\n")%type( Yn )
-            msgs += ("       Lenght of vector...: %i\n")%max(numpy.asarray( Yn ).shape)
-            msgs += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
-            msgs += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
-            msgs += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
-            msgs += ("       Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
-            msgs += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
-            print(msgs)
-            if self._toStore("SimulatedObservationAtCurrentState"):
-                self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) )
+        if __s or self._toStore("SimulatedObservationAtCurrentState"):
+            for i in range(self._parameters["NumberOfRepetition"]):
+                if __s:
+                    print("     %s\n"%("-"*75,))
+                    if self._parameters["NumberOfRepetition"] > 1:
+                        print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
+                #
+                Yn = Ys[i]
+                if __s:
+                    msgs  = ("===> Information after evaluation:\n")
+                    msgs += ("\n     Characteristics of simulated output vector Y=H(X), to compare to others:\n")
+                    msgs += ("       Type...............: %s\n")%type( Yn )
+                    msgs += ("       Lenght of vector...: %i\n")%max(numpy.ravel( Yn ).shape)
+                    msgs += ("       Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
+                    msgs += ("       Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
+                    msgs += ("       Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
+                    msgs += ("       Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
+                    msgs += ("       L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
+                    print(msgs)
+                if self._toStore("SimulatedObservationAtCurrentState"):
+                    self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) )
         #
         if self._parameters["NumberOfRepetition"] > 1:
+            print("     %s\n"%("-"*75,))
+            print("===> Launching statistical summary calculation for %i states\n"%self._parameters["NumberOfRepetition"])
             msgs  = ("     %s\n"%("-"*75,))
             msgs += ("\n===> Statistical analysis of the outputs obtained through parallel repeated evaluations\n")
             msgs += ("\n     (Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)