From 12ac4ed8fd2227a25733dece87d002f524bc5655 Mon Sep 17 00:00:00 2001 From: Jean-Philippe ARGAUD Date: Fri, 31 Mar 2023 16:00:40 +0200 Subject: [PATCH] Example update for OSCT --- doc/en/examples.rst | 1 + ...hm_ObservationSimulationComparisonTest.rst | 16 +++ ...le_ObservationSimulationComparisonTest1.py | 20 +++ ...e_ObservationSimulationComparisonTest1.res | 122 ++++++++++++++++++ ...e_ObservationSimulationComparisonTest1.rst | 19 +++ doc/fr/examples.rst | 1 + ...hm_ObservationSimulationComparisonTest.rst | 16 +++ ...le_ObservationSimulationComparisonTest1.py | 20 +++ ...e_ObservationSimulationComparisonTest1.res | 122 ++++++++++++++++++ ...e_ObservationSimulationComparisonTest1.rst | 19 +++ .../ObservationSimulationComparisonTest.py | 44 ++++--- 11 files changed, 383 insertions(+), 17 deletions(-) create mode 100644 doc/en/scripts/simple_ObservationSimulationComparisonTest1.py create mode 100644 doc/en/scripts/simple_ObservationSimulationComparisonTest1.res create mode 100644 doc/en/scripts/simple_ObservationSimulationComparisonTest1.rst create mode 100644 doc/fr/scripts/simple_ObservationSimulationComparisonTest1.py create mode 100644 doc/fr/scripts/simple_ObservationSimulationComparisonTest1.res create mode 100644 doc/fr/scripts/simple_ObservationSimulationComparisonTest1.rst diff --git a/doc/en/examples.rst b/doc/en/examples.rst index c3feccc..556e239 100644 --- a/doc/en/examples.rst +++ b/doc/en/examples.rst @@ -56,6 +56,7 @@ Checking algorithms uses #. :ref:`Examples with the "AdjointTest" check` #. :ref:`Examples with the "ControledFunctionTest"` #. :ref:`Examples with the "FunctionTest" check` +#. :ref:`Examples with the "ObservationSimulationComparisonTest" check` #. :ref:`Examples with the "ParallelFunctionTest" check` Advanced uses diff --git a/doc/en/ref_algorithm_ObservationSimulationComparisonTest.rst b/doc/en/ref_algorithm_ObservationSimulationComparisonTest.rst index 2b5d5b1..852833d 100644 --- a/doc/en/ref_algorithm_ObservationSimulationComparisonTest.rst +++ b/doc/en/ref_algorithm_ObservationSimulationComparisonTest.rst @@ -97,6 +97,7 @@ StoreSupplementaryCalculations "CurrentState", "Innovation", "InnovationAtCurrentState", + "OMB", "SimulatedObservationAtCurrentState", ]. @@ -123,11 +124,26 @@ StoreSupplementaryCalculations .. include:: snippets/InnovationAtCurrentState.rst +.. include:: snippets/OMB.rst + .. include:: snippets/SimulatedObservationAtCurrentState.rst .. ------------------------------------ .. .. _section_ref_algorithm_ObservationSimulationComparisonTest_examples: +.. include:: snippets/Header2Algo09.rst + +.. --------- .. +.. include:: scripts/simple_ObservationSimulationComparisonTest1.rst + +.. literalinclude:: scripts/simple_ObservationSimulationComparisonTest1.py + +.. include:: snippets/Header2Algo10.rst + +.. literalinclude:: scripts/simple_ObservationSimulationComparisonTest1.res + :language: none + +.. ------------------------------------ .. .. 
include:: snippets/Header2Algo06.rst - :ref:`section_ref_algorithm_FunctionTest` diff --git a/doc/en/scripts/simple_ObservationSimulationComparisonTest1.py b/doc/en/scripts/simple_ObservationSimulationComparisonTest1.py new file mode 100644 index 0000000..8894fb2 --- /dev/null +++ b/doc/en/scripts/simple_ObservationSimulationComparisonTest1.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# +from numpy import array, eye, ones +from adao import adaoBuilder +case = adaoBuilder.New() +case.set("CheckingPoint", Vector = array([0., 1., 2.]) ) +case.set("Observation", Vector = ones(3) ) +case.set("ObservationOperator", Matrix = 1/3 * eye(3) ) +case.setAlgorithmParameters( + Algorithm='ObservationSimulationComparisonTest', + Parameters={ + 'NumberOfRepetition' : 5, + 'NumberOfPrintedDigits' : 2, + 'ShowElementarySummary':False, + 'StoreSupplementaryCalculations': [ + 'CostFunctionJ', + ], + }, + ) +case.execute() diff --git a/doc/en/scripts/simple_ObservationSimulationComparisonTest1.res b/doc/en/scripts/simple_ObservationSimulationComparisonTest1.res new file mode 100644 index 0000000..cc7b45b --- /dev/null +++ b/doc/en/scripts/simple_ObservationSimulationComparisonTest1.res @@ -0,0 +1,122 @@ + + OBSERVATIONSIMULATIONCOMPARISONTEST + =================================== + + This test allows to analyze the (repetition of the) launch of some + given simulation operator F, applied to one single vector argument x, + and its comparison to observations or measures y through the innovation + difference OMB = y - F(x) (Observation minus evaluation at Background) + and (if required) the data assimilation standard cost function J. + The output shows simple statistics related to its successful execution, + or related to the similarities of repetition of its execution. + +===> Information before launching: + ----------------------------- + + Characteristics of input vector X, internally converted: + Type...............: + Length of vector...: 3 + Minimum value......: 0.00e+00 + Maximum value......: 2.00e+00 + Mean of vector.....: 1.00e+00 + Standard error.....: 8.16e-01 + L2 norm of vector..: 2.24e+00 + + Characteristics of input vector of observations Yobs, internally converted: + Type...............: + Length of vector...: 3 + Minimum value......: 1.00e+00 + Maximum value......: 1.00e+00 + Mean of vector.....: 1.00e+00 + Standard error.....: 0.00e+00 + L2 norm of vector..: 1.73e+00 + + --------------------------------------------------------------------------- + +===> Beginning of repeated evaluation, without activating debug + + --------------------------------------------------------------------------- + +===> End of repeated evaluation, without deactivating debug + + --------------------------------------------------------------------------- + +===> Launching statistical summary calculation for 5 states + + --------------------------------------------------------------------------- + +===> Statistical analysis of the outputs obtained through sequential repeated evaluations + + (Remark: numbers that are (about) under 2e-16 represent 0 to machine precision) + + Number of evaluations...........................: 5 + + Characteristics of the whole set of outputs Y: + Size of each of the outputs...................: 3 + Minimum value of the whole set of outputs.....: 0.00e+00 + Maximum value of the whole set of outputs.....: 6.67e-01 + Mean of vector of the whole set of outputs....: 3.33e-01 + Standard error of the whole set of outputs....: 2.72e-01 + + Characteristics of the vector Ym, mean of the outputs Y: + 
Size of the mean of the outputs...............: 3 + Minimum value of the mean of the outputs......: 0.00e+00 + Maximum value of the mean of the outputs......: 6.67e-01 + Mean of the mean of the outputs...............: 3.33e-01 + Standard error of the mean of the outputs.....: 2.72e-01 + + Characteristics of the mean of the differences between the outputs Y and their mean Ym: + Size of the mean of the differences...........: 3 + Minimum value of the mean of the differences..: 0.00e+00 + Maximum value of the mean of the differences..: 0.00e+00 + Mean of the mean of the differences...........: 0.00e+00 + Standard error of the mean of the differences.: 0.00e+00 + + --------------------------------------------------------------------------- + +===> Statistical analysis of the OMB differences obtained through sequential repeated evaluations + + (Remark: numbers that are (about) under 2e-16 represent 0 to machine precision) + + Number of evaluations...........................: 5 + + Characteristics of the whole set of OMB differences: + Size of each of the outputs...................: 3 + Minimum value of the whole set of differences.: 3.33e-01 + Maximum value of the whole set of differences.: 1.00e+00 + Mean of vector of the whole set of differences: 6.67e-01 + Standard error of the whole set of differences: 2.72e-01 + + Characteristics of the vector Dm, mean of the OMB differences: + Size of the mean of the differences...........: 3 + Minimum value of the mean of the differences..: 3.33e-01 + Maximum value of the mean of the differences..: 1.00e+00 + Mean of the mean of the differences...........: 6.67e-01 + Standard error of the mean of the differences.: 2.72e-01 + + Characteristics of the mean of the differences between the OMB differences and their mean Dm: + Size of the mean of the differences...........: 3 + Minimum value of the mean of the differences..: 0.00e+00 + Maximum value of the mean of the differences..: 0.00e+00 + Mean of the mean of the differences...........: 0.00e+00 + Standard error of the mean of the differences.: 0.00e+00 + + --------------------------------------------------------------------------- + +===> Statistical analysis of the cost function J values obtained through sequential repeated evaluations + + Number of evaluations...........................: 5 + + Characteristics of the whole set of data assimilation cost function J values: + Minimum value of the whole set of J...........: 7.78e-01 + Maximum value of the whole set of J...........: 7.78e-01 + Mean of vector of the whole set of J..........: 7.78e-01 + Standard error of the whole set of J..........: 0.00e+00 + (Remark: variations of the cost function J only come from the observation part Jo of J) + + --------------------------------------------------------------------------- + + End of the "OBSERVATIONSIMULATIONCOMPARISONTEST" verification + + --------------------------------------------------------------------------- + diff --git a/doc/en/scripts/simple_ObservationSimulationComparisonTest1.rst b/doc/en/scripts/simple_ObservationSimulationComparisonTest1.rst new file mode 100644 index 0000000..ffa290e --- /dev/null +++ b/doc/en/scripts/simple_ObservationSimulationComparisonTest1.rst @@ -0,0 +1,19 @@ +.. 
index:: single: ObservationSimulationComparisonTest (example) + +This example analyzes the (repeated) running of a simulation operator +:math:`\mathbf{F}` explicitly given in matrix form (described for the test by +the observation command "*ObservationOperator*"), applied to a particular state +:math:`\mathbf{x}` on which to test (described for the test by the +"*CheckingPoint*" command), compared to measurements :math:`\mathbf{y}` +(described for the test by the "*Observation*" command) by the difference OMB = +y - F(x) (Observation minus evaluation at Background) and the standard data +assimilation cost function J. + +The test is repeated a configurable number of times, and a final statistic +allows one to quickly check the correct behavior of the operator. The simplest +diagnostic consists in checking, at the very end of the display, the order of +magnitude of the variations of the values indicated as the average of the +differences between the repeated outputs and their average, under the part +entitled "*Launching statistical summary calculation for 5 states*". For a +satisfactory operator, the values of differences from the mean and the standard +deviations should be close to numerical zero. diff --git a/doc/fr/examples.rst b/doc/fr/examples.rst index 041f5e4..7774381 100644 --- a/doc/fr/examples.rst +++ b/doc/fr/examples.rst @@ -57,6 +57,7 @@ Utilisations d'algorithmes de vérification #. :ref:`Exemples de vérification avec "AdjointTest"` #. :ref:`Exemples de vérification avec "ControledFunctionTest"` #. :ref:`Exemples de vérification avec "FunctionTest"` +#. :ref:`Exemples de vérification avec "ObservationSimulationComparisonTest"` #. :ref:`Exemples de vérification avec "ParallelFunctionTest"` Utilisations avancées diff --git a/doc/fr/ref_algorithm_ObservationSimulationComparisonTest.rst b/doc/fr/ref_algorithm_ObservationSimulationComparisonTest.rst index c420978..d1089bb 100644 --- a/doc/fr/ref_algorithm_ObservationSimulationComparisonTest.rst +++ b/doc/fr/ref_algorithm_ObservationSimulationComparisonTest.rst @@ -99,6 +99,7 @@ StoreSupplementaryCalculations "CurrentState", "Innovation", "InnovationAtCurrentState", + "OMB", "SimulatedObservationAtCurrentState", ]. @@ -125,11 +126,26 @@ StoreSupplementaryCalculations .. include:: snippets/InnovationAtCurrentState.rst +.. include:: snippets/OMB.rst + .. include:: snippets/SimulatedObservationAtCurrentState.rst .. ------------------------------------ .. .. _section_ref_algorithm_ObservationSimulationComparisonTest_examples: +.. include:: snippets/Header2Algo09.rst + +.. --------- .. +.. include:: scripts/simple_ObservationSimulationComparisonTest1.rst + +.. literalinclude:: scripts/simple_ObservationSimulationComparisonTest1.py + +.. include:: snippets/Header2Algo10.rst + +.. literalinclude:: scripts/simple_ObservationSimulationComparisonTest1.res + :language: none + +.. ------------------------------------ .. .. 
include:: snippets/Header2Algo06.rst - :ref:`section_ref_algorithm_FunctionTest` diff --git a/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.py b/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.py new file mode 100644 index 0000000..8894fb2 --- /dev/null +++ b/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# +from numpy import array, eye, ones +from adao import adaoBuilder +case = adaoBuilder.New() +case.set("CheckingPoint", Vector = array([0., 1., 2.]) ) +case.set("Observation", Vector = ones(3) ) +case.set("ObservationOperator", Matrix = 1/3 * eye(3) ) +case.setAlgorithmParameters( + Algorithm='ObservationSimulationComparisonTest', + Parameters={ + 'NumberOfRepetition' : 5, + 'NumberOfPrintedDigits' : 2, + 'ShowElementarySummary':False, + 'StoreSupplementaryCalculations': [ + 'CostFunctionJ', + ], + }, + ) +case.execute() diff --git a/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.res b/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.res new file mode 100644 index 0000000..cc7b45b --- /dev/null +++ b/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.res @@ -0,0 +1,122 @@ + + OBSERVATIONSIMULATIONCOMPARISONTEST + =================================== + + This test allows to analyze the (repetition of the) launch of some + given simulation operator F, applied to one single vector argument x, + and its comparison to observations or measures y through the innovation + difference OMB = y - F(x) (Observation minus evaluation at Background) + and (if required) the data assimilation standard cost function J. + The output shows simple statistics related to its successful execution, + or related to the similarities of repetition of its execution. + +===> Information before launching: + ----------------------------- + + Characteristics of input vector X, internally converted: + Type...............: + Length of vector...: 3 + Minimum value......: 0.00e+00 + Maximum value......: 2.00e+00 + Mean of vector.....: 1.00e+00 + Standard error.....: 8.16e-01 + L2 norm of vector..: 2.24e+00 + + Characteristics of input vector of observations Yobs, internally converted: + Type...............: + Length of vector...: 3 + Minimum value......: 1.00e+00 + Maximum value......: 1.00e+00 + Mean of vector.....: 1.00e+00 + Standard error.....: 0.00e+00 + L2 norm of vector..: 1.73e+00 + + --------------------------------------------------------------------------- + +===> Beginning of repeated evaluation, without activating debug + + --------------------------------------------------------------------------- + +===> End of repeated evaluation, without deactivating debug + + --------------------------------------------------------------------------- + +===> Launching statistical summary calculation for 5 states + + --------------------------------------------------------------------------- + +===> Statistical analysis of the outputs obtained through sequential repeated evaluations + + (Remark: numbers that are (about) under 2e-16 represent 0 to machine precision) + + Number of evaluations...........................: 5 + + Characteristics of the whole set of outputs Y: + Size of each of the outputs...................: 3 + Minimum value of the whole set of outputs.....: 0.00e+00 + Maximum value of the whole set of outputs.....: 6.67e-01 + Mean of vector of the whole set of outputs....: 3.33e-01 + Standard error of the whole set of outputs....: 2.72e-01 + + Characteristics of the vector Ym, mean of the outputs Y: + 
Size of the mean of the outputs...............: 3 + Minimum value of the mean of the outputs......: 0.00e+00 + Maximum value of the mean of the outputs......: 6.67e-01 + Mean of the mean of the outputs...............: 3.33e-01 + Standard error of the mean of the outputs.....: 2.72e-01 + + Characteristics of the mean of the differences between the outputs Y and their mean Ym: + Size of the mean of the differences...........: 3 + Minimum value of the mean of the differences..: 0.00e+00 + Maximum value of the mean of the differences..: 0.00e+00 + Mean of the mean of the differences...........: 0.00e+00 + Standard error of the mean of the differences.: 0.00e+00 + + --------------------------------------------------------------------------- + +===> Statistical analysis of the OMB differences obtained through sequential repeated evaluations + + (Remark: numbers that are (about) under 2e-16 represent 0 to machine precision) + + Number of evaluations...........................: 5 + + Characteristics of the whole set of OMB differences: + Size of each of the outputs...................: 3 + Minimum value of the whole set of differences.: 3.33e-01 + Maximum value of the whole set of differences.: 1.00e+00 + Mean of vector of the whole set of differences: 6.67e-01 + Standard error of the whole set of differences: 2.72e-01 + + Characteristics of the vector Dm, mean of the OMB differences: + Size of the mean of the differences...........: 3 + Minimum value of the mean of the differences..: 3.33e-01 + Maximum value of the mean of the differences..: 1.00e+00 + Mean of the mean of the differences...........: 6.67e-01 + Standard error of the mean of the differences.: 2.72e-01 + + Characteristics of the mean of the differences between the OMB differences and their mean Dm: + Size of the mean of the differences...........: 3 + Minimum value of the mean of the differences..: 0.00e+00 + Maximum value of the mean of the differences..: 0.00e+00 + Mean of the mean of the differences...........: 0.00e+00 + Standard error of the mean of the differences.: 0.00e+00 + + --------------------------------------------------------------------------- + +===> Statistical analysis of the cost function J values obtained through sequential repeated evaluations + + Number of evaluations...........................: 5 + + Characteristics of the whole set of data assimilation cost function J values: + Minimum value of the whole set of J...........: 7.78e-01 + Maximum value of the whole set of J...........: 7.78e-01 + Mean of vector of the whole set of J..........: 7.78e-01 + Standard error of the whole set of J..........: 0.00e+00 + (Remark: variations of the cost function J only come from the observation part Jo of J) + + --------------------------------------------------------------------------- + + End of the "OBSERVATIONSIMULATIONCOMPARISONTEST" verification + + --------------------------------------------------------------------------- + diff --git a/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.rst b/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.rst new file mode 100644 index 0000000..cfe83ca --- /dev/null +++ b/doc/fr/scripts/simple_ObservationSimulationComparisonTest1.rst @@ -0,0 +1,19 @@ +.. 
index:: single: ObservationSimulationComparisonTest (exemple) + +Cet exemple permet d'analyser le lancement (répété) d'un opérateur de +simulation :math:`\mathbf{F}` explicitement donné sous forme matricielle +(décrit pour le test par la commande d'observation "*ObservationOperator*"), +appliqué à un état particulier :math:`\mathbf{x}` sur lequel le tester (décrit +pour le test par la commande "*CheckingPoint*"), comparé à des mesures +:math:`\mathbf{y}` (décrit pour le test par la commande "*Observation*") par la +différence OMB = y - F(x) (Observation minus evaluation at Background) et la +fonction de coût standard d'assimilation des données J. + +Le test est répété un nombre paramétrable de fois, et une statistique finale +permet de vérifier rapidement le bon comportement de l'opérateur. Le diagnostic +le plus simple consiste à vérifier, à la toute fin de l'affichage, l'ordre de +grandeur des variations des valeurs indiquées comme la moyenne des différences +entre les sorties répétées et leur moyenne, sous la partie titrée "*Launching +statistical summary calculation for 5 states*". Pour un opérateur satisfaisant, +les valeurs de différences à la moyenne et les écarts-types doivent être +proches du zéro numérique. diff --git a/src/daComposant/daAlgorithms/ObservationSimulationComparisonTest.py b/src/daComposant/daAlgorithms/ObservationSimulationComparisonTest.py index 09b171f..bf234d4 100644 --- a/src/daComposant/daAlgorithms/ObservationSimulationComparisonTest.py +++ b/src/daComposant/daAlgorithms/ObservationSimulationComparisonTest.py @@ -73,6 +73,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CurrentState", "Innovation", "InnovationAtCurrentState", + "OMB", "SimulatedObservationAtCurrentState", ] ) @@ -127,7 +128,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += ("\n") msgs += (__marge + "This test allows to analyze the (repetition of the) launch of some\n") msgs += (__marge + "given simulation operator F, applied to one single vector argument x,\n") - msgs += (__marge + "and its (repeated) comparison to observations or measures y.\n") + msgs += (__marge + "and its comparison to observations or measures y through the innovation\n") + msgs += (__marge + "difference OMB = y - F(x) (Observation minus evaluation at Background)\n") + msgs += (__marge + "and (if required) the data assimilation standard cost function J.\n") msgs += (__marge + "The output shows simple statistics related to its successful execution,\n") msgs += (__marge + "or related to the similarities of repetition of its execution.\n") msgs += ("\n") @@ -194,6 +197,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Dn = _Y0 - numpy.ravel( Yn ) # + if len(self._parameters["StoreSupplementaryCalculations"]) > 0: + J, Jb, Jo = CostFunction( X0, Yn ) + if self._toStore("CostFunctionJ"): + Js.append( J ) if __s: msgs = ("\n") # 2-2 msgs += (__flech + "End of operator sequential evaluation\n") @@ -209,7 +216,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += (__marge + " Standard error.....: %."+str(__p)+"e\n")%numpy.std( Yn, dtype=mfp ) msgs += (__marge + " L2 norm of vector..: %."+str(__p)+"e\n")%numpy.linalg.norm( Yn ) msgs += ("\n") - msgs += (__marge + "Characteristics of increment between observations Yobs and simulated output vector Y=F(X):\n") + msgs += (__marge + "Characteristics of OMB differences between observations Yobs and simulated output vector Y=F(X):\n") msgs += (__marge + " Type...............: %s\n")%type( Dn ) msgs += (__marge + " Length of vector...: 
%i\n")%max(numpy.ravel( Dn ).shape) msgs += (__marge + " Minimum value......: %."+str(__p)+"e\n")%numpy.min( Dn ) @@ -218,18 +225,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += (__marge + " Standard error.....: %."+str(__p)+"e\n")%numpy.std( Dn, dtype=mfp ) msgs += (__marge + " L2 norm of vector..: %."+str(__p)+"e\n")%numpy.linalg.norm( Dn ) if len(self._parameters["StoreSupplementaryCalculations"]) > 0: - J, Jb, Jo = CostFunction( X0, Yn ) if self._toStore("CostFunctionJ"): - Js.append( J ) msgs += ("\n") msgs += (__marge + " Cost function J....: %."+str(__p)+"e\n")%J msgs += (__marge + " Cost function Jb...: %."+str(__p)+"e\n")%Jb msgs += (__marge + " Cost function Jo...: %."+str(__p)+"e\n")%Jo + msgs += (__marge + " (Remark: the Jb background part of the cost function J is zero by hypothesis)\n") print(msgs) # 2-2 if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) ) if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( Dn ) + if self._toStore("OMB"): + self.StoredVariables["OMB"].store( Dn ) if self._toStore("InnovationAtCurrentState"): self.StoredVariables["InnovationAtCurrentState"].store( Dn ) # @@ -296,30 +304,30 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += ("\n") msgs += (__marge + "%s\n"%("-"*75,)) msgs += ("\n") - msgs += (__flech + "Statistical analysis of the increments obtained through sequential repeated evaluations\n") + msgs += (__flech + "Statistical analysis of the OMB differences obtained through sequential repeated evaluations\n") msgs += ("\n") msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr) msgs += ("\n") Dy = numpy.array( Ds ) msgs += (__marge + "Number of evaluations...........................: %i\n")%len( Ds ) msgs += ("\n") - msgs += (__marge + "Characteristics of the whole set of increments D:\n") + msgs += (__marge + "Characteristics of the whole set of OMB differences:\n") msgs += (__marge + " Size of each of the outputs...................: %i\n")%Ds[0].size - msgs += (__marge + " Minimum value of the whole set of increments..: %."+str(__p)+"e\n")%numpy.min( Dy ) - msgs += (__marge + " Maximum value of the whole set of increments..: %."+str(__p)+"e\n")%numpy.max( Dy ) - msgs += (__marge + " Mean of vector of the whole set of increments.: %."+str(__p)+"e\n")%numpy.mean( Dy, dtype=mfp ) - msgs += (__marge + " Standard error of the whole set of increments.: %."+str(__p)+"e\n")%numpy.std( Dy, dtype=mfp ) + msgs += (__marge + " Minimum value of the whole set of differences.: %."+str(__p)+"e\n")%numpy.min( Dy ) + msgs += (__marge + " Maximum value of the whole set of differences.: %."+str(__p)+"e\n")%numpy.max( Dy ) + msgs += (__marge + " Mean of vector of the whole set of differences: %."+str(__p)+"e\n")%numpy.mean( Dy, dtype=mfp ) + msgs += (__marge + " Standard error of the whole set of differences: %."+str(__p)+"e\n")%numpy.std( Dy, dtype=mfp ) msgs += ("\n") Dm = numpy.mean( numpy.array( Ds ), axis=0, dtype=mfp ) - msgs += (__marge + "Characteristics of the vector Dm, mean of the increments D:\n") - msgs += (__marge + " Size of the mean of the increments............: %i\n")%Dm.size - msgs += (__marge + " Minimum value of the mean of the increments...: %."+str(__p)+"e\n")%numpy.min( Dm ) - msgs += (__marge + " Maximum value of the mean of the increments...: %."+str(__p)+"e\n")%numpy.max( Dm ) - msgs += (__marge + " Mean of the mean of the increments............: 
%."+str(__p)+"e\n")%numpy.mean( Dm, dtype=mfp ) - msgs += (__marge + " Standard error of the mean of the increments..: %."+str(__p)+"e\n")%numpy.std( Dm, dtype=mfp ) + msgs += (__marge + "Characteristics of the vector Dm, mean of the OMB differences:\n") + msgs += (__marge + " Size of the mean of the differences...........: %i\n")%Dm.size + msgs += (__marge + " Minimum value of the mean of the differences..: %."+str(__p)+"e\n")%numpy.min( Dm ) + msgs += (__marge + " Maximum value of the mean of the differences..: %."+str(__p)+"e\n")%numpy.max( Dm ) + msgs += (__marge + " Mean of the mean of the differences...........: %."+str(__p)+"e\n")%numpy.mean( Dm, dtype=mfp ) + msgs += (__marge + " Standard error of the mean of the differences.: %."+str(__p)+"e\n")%numpy.std( Dm, dtype=mfp ) msgs += ("\n") De = numpy.mean( numpy.array( Ds ) - Dm, axis=0, dtype=mfp ) - msgs += (__marge + "Characteristics of the mean of the differences between the increments D and their mean Dm:\n") + msgs += (__marge + "Characteristics of the mean of the differences between the OMB differences and their mean Dm:\n") msgs += (__marge + " Size of the mean of the differences...........: %i\n")%De.size msgs += (__marge + " Minimum value of the mean of the differences..: %."+str(__p)+"e\n")%numpy.min( De ) msgs += (__marge + " Maximum value of the mean of the differences..: %."+str(__p)+"e\n")%numpy.max( De ) @@ -330,6 +338,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += ("\n") Jj = numpy.array( Js ) msgs += (__marge + "%s\n\n"%("-"*75,)) + msgs += (__flech + "Statistical analysis of the cost function J values obtained through sequential repeated evaluations\n") + msgs += ("\n") msgs += (__marge + "Number of evaluations...........................: %i\n")%len( Js ) msgs += ("\n") msgs += (__marge + "Characteristics of the whole set of data assimilation cost function J values:\n") -- 2.39.2