SALOME platform Git repositories - modules/adao.git/commitdiff
Salome HOME
Minor updates for module behavior and tests
authorJean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Sun, 3 Feb 2019 20:44:53 +0000 (21:44 +0100)
committerJean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Sun, 3 Feb 2019 20:44:53 +0000 (21:44 +0100)
src/daComposant/daAlgorithms/3DVAR.py
src/daComposant/daAlgorithms/Blue.py
src/daComposant/daAlgorithms/ExtendedBlue.py
src/daComposant/daAlgorithms/LinearLeastSquares.py
src/daComposant/daAlgorithms/NonLinearLeastSquares.py
test/test6904/CTestTestfile.cmake
test/test6904/Definition_complete_de_cas_Blue.py [new file with mode: 0644]

index d6002758d3cb410587686b0500de3164b76f1a5c..0dc21b02b2516437c1b2ee61ab7f898397dbbf4b 100644 (file)
@@ -203,12 +203,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
             if self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
-            if self._toStore("CostFunctionJAtCurrentOptimum"):
-                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             if self._toStore("CostFunctionJbAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
             if self._toStore("CostFunctionJoAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+            if self._toStore("CostFunctionJAtCurrentOptimum"):
+                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             return J
         #
         def GradientOfCostFunction(x):
index 6dcca39f8c24457b0115ac2ff9f6769e41025f57..f93ace8d941dbefa06ea360060ea3c88466d91b3 100644 (file)
@@ -46,8 +46,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "APosterioriVariances",
                 "BMA",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
                 "CurrentState",
                 "Innovation",
                 "MahalanobisConsistency",
@@ -56,6 +60,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "SigmaBck2",
                 "SigmaObs2",
                 "SimulatedObservationAtBackground",
+                "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 "SimulatedObservationAtOptimum",
                 "SimulationQuantiles",
@@ -136,17 +141,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Calcul de la fonction coût
         # --------------------------
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("OMA") or \
             self._toStore("SigmaObs2") or \
             self._toStore("MahalanobisConsistency") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
             self._toStore("SimulatedObservationAtCurrentState") or \
             self._toStore("SimulatedObservationAtOptimum") or \
             self._toStore("SimulationQuantiles"):
             HXa = Hm * Xa
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("MahalanobisConsistency"):
             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
             Jo  = float( 0.5 * oma.T * RI * oma )
@@ -154,6 +164,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Calcul de la covariance d'analyse
         # ---------------------------------
@@ -177,6 +190,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ---------------------------------------
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
         if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
         if self._toStore("BMA"):
@@ -219,6 +234,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
         if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
index c6856b9fe1d32fd886e36473c689f0d6ff968d54..1af6a094fcb6fcd558cc8472d869dbe750c5f728 100644 (file)
@@ -45,20 +45,25 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "APosterioriStandardDeviations",
                 "APosterioriVariances",
                 "BMA",
-                "OMA",
-                "OMB",
-                "CurrentState",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
+                "CurrentState",
                 "Innovation",
+                "MahalanobisConsistency",
+                "OMA",
+                "OMB",
                 "SigmaBck2",
                 "SigmaObs2",
-                "MahalanobisConsistency",
-                "SimulationQuantiles",
                 "SimulatedObservationAtBackground",
+                "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 "SimulatedObservationAtOptimum",
+                "SimulationQuantiles",
                 ]
             )
         self.defineRequiredParameter(
@@ -137,17 +142,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Calcul de la fonction coût
         # --------------------------
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("OMA") or \
             self._toStore("SigmaObs2") or \
             self._toStore("MahalanobisConsistency") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
             self._toStore("SimulatedObservationAtCurrentState") or \
             self._toStore("SimulatedObservationAtOptimum") or \
             self._toStore("SimulationQuantiles"):
             HXa  = numpy.matrix(numpy.ravel( H( Xa ) )).T
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("MahalanobisConsistency"):
             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
             Jo  = float( 0.5 * oma.T * RI * oma )
@@ -155,6 +165,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Calcul de la covariance d'analyse
         # ---------------------------------
@@ -178,6 +191,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ---------------------------------------
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
         if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
         if self._toStore("BMA"):
@@ -222,6 +237,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
         if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
index b064ee783e49480c084bd9f281155c1df0921e10..15798c5014d05dc6a7212cbcddd80fe036abac51 100644 (file)
@@ -39,7 +39,20 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = [],
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
-            listval  = ["OMA", "CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
+            listval  = [
+                "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
+                "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
+                "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
+                "CurrentState",
+                "OMA",
+                "SimulatedObservationAtCurrentOptimum",
+                "SimulatedObservationAtCurrentState",
+                "SimulatedObservationAtOptimum",
+                ]
             )
         self.requireInputArguments(
             mandatory= ("Y", "HO", "R"),
@@ -64,28 +77,43 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Calcul de la fonction coût
         # --------------------------
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
+            self._toStore("SimulatedObservationAtCurrentState") or \
             self._toStore("SimulatedObservationAtOptimum"):
             HXa = Hm * Xa
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ"):
+            self._toStore("CostFunctionJ")  or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"):
             Jb  = 0.
-            Jo  = 0.5 * oma.T * RI * oma
-            J   = float( Jb ) + float( Jo )
+            Jo  = float( 0.5 * oma.T * RI * oma )
+            J   = Jb + Jo
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Calculs et/ou stockages supplémentaires
         # ---------------------------------------
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
         if self._toStore("OMA"):
             self.StoredVariables["OMA"].store( numpy.ravel(oma) )
+        if self._toStore("SimulatedObservationAtBackground"):
+            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
         if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
index c1e76ac7d55bee7354d2a4459aca738fb21f97c7..acd77da3063ce763f592cbc03426d3b022609f06 100644 (file)
@@ -170,12 +170,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
             if self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
-            if self._toStore("CostFunctionJAtCurrentOptimum"):
-                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             if self._toStore("CostFunctionJbAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
             if self._toStore("CostFunctionJoAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+            if self._toStore("CostFunctionJAtCurrentOptimum"):
+                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             return J
         #
         def GradientOfCostFunction(x):
index 0b0a3d1ab69a880bf87981830af2bb49b1762b76..552e54df3d8145f1d3765955e01d9b9abb4e7656 100644 (file)
@@ -21,6 +21,7 @@
 
 SET(TEST_NAMES
   Definition_complete_de_cas_3DVAR
+  Definition_complete_de_cas_Blue
   )
 
 FOREACH(tfile ${TEST_NAMES})
diff --git a/test/test6904/Definition_complete_de_cas_Blue.py b/test/test6904/Definition_complete_de_cas_Blue.py
new file mode 100644 (file)
index 0000000..3daa993
--- /dev/null
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2008-2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+"Verification d'un exemple de la documentation"
+
+import sys
+import unittest
+import numpy
+
+# ==============================================================================
+#
+# Construction artificielle d'un exemple de donnees utilisateur
+# -------------------------------------------------------------
+alpha = 5.
+beta = 7
+gamma = 9.0
+#
+alphamin, alphamax = 0., 10.
+betamin,  betamax  = 3, 13
+gammamin, gammamax = 1.5, 15.5
+#
+def simulation(x):
+    "Fonction de simulation H pour effectuer Y=H(X)"
+    import numpy
+    __x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
+    __H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
+    return __H * __x
+#
+def multisimulation( xserie ):
+    yserie = []
+    for x in xserie:
+        yserie.append( simulation( x ) )
+    return yserie
+#
+# Observations obtenues par simulation
+# ------------------------------------
+observations = simulation((2, 3, 4))
+
+# ==============================================================================
+class InTest(unittest.TestCase):
+    def test1(self):
+        print("""Exemple de la doc :
+
+        Exploitation independante des resultats d'un cas de calcul
+        ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+        """)
+        #
+        import numpy
+        from adao import adaoBuilder
+        #
+        # Mise en forme des entrees
+        # -------------------------
+        Xb = (alpha, beta, gamma)
+        Bounds = (
+            (alphamin, alphamax),
+            (betamin,  betamax ),
+            (gammamin, gammamax))
+        #
+        # TUI ADAO
+        # --------
+        case = adaoBuilder.New()
+        case.set( 'AlgorithmParameters',
+            Algorithm = 'Blue',                  # Mots-clé réservé
+            Parameters = {                        # Dictionnaire
+                "StoreSupplementaryCalculations":[# Liste de mots-clés réservés
+                    "CostFunctionJAtCurrentOptimum",
+                    "CostFunctionJoAtCurrentOptimum",
+                    "CurrentOptimum",
+                    "SimulatedObservationAtCurrentOptimum",
+                    "SimulatedObservationAtOptimum",
+                    ],
+                }
+            )
+        case.set( 'Background',
+            Vector = numpy.array(Xb),             # array, list, tuple, matrix
+            Stored = True,                        # Bool
+            )
+        case.set( 'Observation',
+            Vector = numpy.array(observations),   # array, list, tuple, matrix
+            Stored = False,                       # Bool
+            )
+        case.set( 'BackgroundError',
+            Matrix = None,                        # None ou matrice carrée
+            ScalarSparseMatrix = 1.0e10,          # None ou Real > 0
+            DiagonalSparseMatrix = None,          # None ou vecteur
+            )
+        case.set( 'ObservationError',
+            Matrix = None,                        # None ou matrice carrée
+            ScalarSparseMatrix = 1.0,             # None ou Real > 0
+            DiagonalSparseMatrix = None,          # None ou vecteur
+            )
+        case.set( 'ObservationOperator',
+            OneFunction = multisimulation,        # MultiFonction [Y] = F([X])
+            Parameters  = {                       # Dictionnaire
+                "DifferentialIncrement":0.0001,   # Real > 0
+                "CenteredFiniteDifference":False, # Bool
+                },
+            InputFunctionAsMulti = True,          # Bool
+            )
+        case.set( 'Observer',
+            Variable = "CurrentState",            # Mot-clé
+            Template = "ValuePrinter",            # Mot-clé
+            String   = None,                      # None ou code Python
+            Info     = None,                      # None ou string
+
+            )
+        case.execute()
+        #
+        # Exploitation independante
+        # -------------------------
+        Xbackground   = case.get("Background")
+        Xoptimum      = case.get("Analysis")[-1]
+        FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
+        J_values      = case.get("CostFunctionJAtCurrentOptimum")[:]
+        print("")
+        print("Number of internal iterations...: %i"%len(J_values))
+        print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
+        print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
+        print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
+        print("")
+        #
+        # Fin du cas
+        # ----------
+        ecart = assertAlmostEqualArrays(Xoptimum, [ 2., 3., 4.])
+        #
+        print("  L'écart absolu maximal obtenu lors du test est de %.2e."%ecart)
+        print("  Les résultats obtenus sont corrects.")
+        print("")
+        #
+        return Xoptimum
+
+# ==============================================================================
+def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
+    "Compare two vectors, like unittest.assertAlmostEqual"
+    import numpy
+    if msg is not None:
+        print(msg)
+    if delta is not None:
+        if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
+            raise AssertionError("%s != %s within %s places"%(first,second,delta))
+    else:
+        if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any():
+            raise AssertionError("%s != %s within %i places"%(first,second,places))
+    return max(abs(numpy.asarray(first) - numpy.asarray(second)))
+
+# ==============================================================================
+if __name__ == '__main__':
+    print("\nAUTODIAGNOSTIC\n==============")
+    unittest.main()