SALOME platform Git repositories - modules/adao.git/commitdiff
Minor tests review corrections
author    Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
          Mon, 20 Nov 2023 14:25:53 +0000 (15:25 +0100)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
          Mon, 20 Nov 2023 14:25:53 +0000 (15:25 +0100)
src/daComposant/daAlgorithms/GradientTest.py
src/daComposant/daAlgorithms/LinearityTest.py
src/daComposant/daAlgorithms/TangentTest.py
src/daComposant/daCore/BasicObjects.py
test/test6902/Verification_des_Checking_Algorithms.py

diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py
index 99b4ad193bf82d9c47f8f5c9dc26d33094a61b78..67627ecd7f7d5ab11bdfcf8397c92e9be1d873ca 100644
@@ -216,6 +216,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
         msgs += ("\n")
         msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
+        if "DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None:
+            msgs += ("\n")
+            msgs += (__marge + "Reminder: gradient operator is obtained internally by finite differences,\n")
+            msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
         msgs += ("\n")
         msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
         print(msgs) # 1
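
The reminder added to GradientTest.py above covers the case where the operator is supplied as a single function, so its gradient is obtained internally by finite differences. As a rough, hypothetical illustration of what the differential increment controls (the helper below is not ADAO code; ADAO's own finite-difference approximation is built component by component), a directional derivative can be approximated as:

    import numpy

    def fd_directional_derivative(F, X, dX, increment=1e-6):
        # Hypothetical sketch: approximate F'(X).dX by a one-sided finite difference;
        # `increment` plays the role of the DifferentialIncrement reported in the message above.
        X  = numpy.ravel(X).astype(float)
        dX = numpy.ravel(dX).astype(float)
        return (numpy.ravel(F(X + increment * dX)) - numpy.ravel(F(X))) / increment

The smaller the increment, the closer the approximation to the true directional derivative until floating-point cancellation takes over, which is presumably why the reminder prints the value used.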
diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py
index a151f558e78ddeb4672bd67d69a6dd69bbd1e9c2..97d3113151b1441e267002e0083d690bcfbba707 100644
@@ -223,6 +223,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
         msgs += ("\n")
         msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
+        if (self._parameters["ResiduFormula"] == "Taylor") and ("DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None):
+            msgs += ("\n")
+            msgs += (__marge + "Reminder: gradient operator is obtained internally by finite differences,\n")
+            msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
         msgs += ("\n")
         msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
         print(msgs) # 1
@@ -245,7 +249,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             X0,
             )
         #
-        if self._parameters["ResiduFormula"] in ["Taylor", "NominalTaylor", "NominalTaylorRMS"]:
+        if self._parameters["ResiduFormula"] == "Taylor":
             dX1      = float(self._parameters["AmplitudeOfTangentPerturbation"]) * dX0
             GradFxdX = Ht( (X0, dX1) )
             GradFxdX = numpy.ravel( GradFxdX ).reshape((-1,1))
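
The second LinearityTest.py hunk restricts the computation of the tangent increment GradFxdX to the "Taylor" residue, presumably because it is the only formula in the original list that uses this value. As a hedged reminder (assuming the usual first-order Taylor expansion that this residue checks), the quantity compared is:

    import numpy

    def taylor_residue(F, X0, dX0, GradFxdX0, alpha):
        # Hedged sketch: relative gap between F(X0 + alpha*dX0) and its first-order
        # Taylor prediction F(X0) + alpha*F'(X0).dX0; for a linear F this stays at
        # machine precision, which is what a linearity check exploits.
        FX0  = numpy.ravel(F(numpy.ravel(X0)))
        FXdX = numpy.ravel(F(numpy.ravel(X0) + alpha * numpy.ravel(dX0)))
        return numpy.linalg.norm(FXdX - FX0 - alpha * numpy.ravel(GradFxdX0)) / numpy.linalg.norm(FX0)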
diff --git a/src/daComposant/daAlgorithms/TangentTest.py b/src/daComposant/daAlgorithms/TangentTest.py
index e653bed6aee29951efac9bb171f8092c8a457bcd..525210f856003fa015b39abd391e6320c2dd743a 100644
@@ -167,6 +167,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
         msgs += ("\n")
         msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
+        if "DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None:
+            msgs += ("\n")
+            msgs += (__marge + "Reminder: tangent operator is obtained internally by finite differences,\n")
+            msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
         msgs += ("\n")
         msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
         print(msgs) # 1
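
TangentTest.py receives the same reminder, phrased for the tangent operator. A hedged sketch of the ratio this kind of test is built around (assuming the usual convergence-to-one criterion; this is not ADAO's exact implementation):

    import numpy

    def tangent_ratio(F, X0, dX0, TangentFXdX0, alpha):
        # Hedged sketch: ||F(X0 + alpha*dX0) - F(X0)|| / (alpha * ||F'(X0).dX0||)
        # should tend to 1 as alpha decreases if the tangent operator is consistent with F.
        num = numpy.linalg.norm(numpy.ravel(F(numpy.ravel(X0) + alpha * numpy.ravel(dX0)))
                                - numpy.ravel(F(numpy.ravel(X0))))
        den = alpha * numpy.linalg.norm(numpy.ravel(TangentFXdX0))
        return num / den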
diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py
index 7977635623a4ae4dd1dded38617be6e5a7663fbb..659e34d8d11ecb1e6ec6474965f8e50aee05ab8b 100644
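
The three hunks below in BasicObjects.py make the FullOperator description always expose a "DifferentialIncrement" entry: the actual increment when the operator comes from a single function differentiated by finite differences, and None for the explicit three-function and matrix forms. The checking algorithms can then rely on the guard shown in the hunks above; a minimal sketch of that read pattern (HO standing for the operator description handed to an algorithm):

    # Minimal sketch of the guard used by the checking algorithms above.
    if "DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None:
        print("Operator differentiated by finite differences, increment %.2e"
              % HO["DifferentialIncrement"])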
@@ -587,6 +587,7 @@ class FullOperator(object):
                 avoidingRedundancy = __avoidRC,
                 inputAsMultiFunction = inputAsMF,
                 extraArguments = self.__extraArgs )
+            self.__FO["DifferentialIncrement"] = __Function["DifferentialIncrement"]
         elif isinstance(__Function, dict) and \
                 ("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
                 (__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
@@ -612,6 +613,7 @@ class FullOperator(object):
                 avoidingRedundancy = __avoidRC,
                 inputAsMultiFunction = inputAsMF,
                 extraArguments = self.__extraArgs )
+            self.__FO["DifferentialIncrement"] = None
         elif asMatrix is not None:
             if isinstance(__Matrix, str):
                 __Matrix = PlatformInfo.strmatrix2liststr( __Matrix )
@@ -636,6 +638,7 @@ class FullOperator(object):
                 avoidingRedundancy = __avoidRC,
                 inputAsMultiFunction = inputAsMF )
             del __matrice
+            self.__FO["DifferentialIncrement"] = None
         else:
             raise ValueError(
                 "The %s object is improperly defined or undefined,"%self.__name+\
diff --git a/test/test6902/Verification_des_Checking_Algorithms.py b/test/test6902/Verification_des_Checking_Algorithms.py
index 5b96d99989e7a4811bdd8e6210653f2ab107e043..24857278f9204bc64f3a48bef8b6268b9ce181d1 100644
@@ -77,6 +77,25 @@ class Test_Adao(unittest.TestCase):
             adaopy.setObserver           ("CurrentState",Template="ValuePrinter")
             adaopy.execute()
             del adaopy
+        #
+        for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"):
+            print("")
+            msg = "Algorithme en test : %s"%algo
+            print(msg+"\n"+"-"*len(msg))
+            #
+            def simulation( arguments ):
+                _X = arguments
+                X = numpy.ravel( _X )
+                H = numpy.array([[1,0,0],[0,2,0],[0,0,3],[1,2,3]])
+                return numpy.dot(H,X)
+            #
+            adaopy = adaoBuilder.New()
+            adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000})
+            adaopy.setCheckingPoint      (Vector = [0,1,2])
+            adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+            adaopy.setObservationOperator(OneFunction = simulation)
+            adaopy.execute()
+            del adaopy
 
 #===============================================================================
 if __name__ == "__main__":
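
For reference, the observation operator used in the test loop added above is a small linear map, so the expected image of the checking point can be verified by hand (a quick check outside the test file, not part of the commit):

    import numpy

    H = numpy.array([[1, 0, 0], [0, 2, 0], [0, 0, 3], [1, 2, 3]])
    X = numpy.array([0, 1, 2])
    print(numpy.dot(H, X))  # [0 2 6 8]: the image of the checking point [0, 1, 2]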