#
msgs += ("\n")
msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
+ if "DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None:
+ msgs += ("\n")
+ msgs += (__marge + "Reminder: gradient operator is obtained internally by finite differences,\n")
+ msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
msgs += ("\n")
msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
print(msgs) # 1
#
msgs += ("\n")
msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
+ if (self._parameters["ResiduFormula"] == "Taylor") and ("DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None):
+ msgs += ("\n")
+ msgs += (__marge + "Reminder: gradient operator is obtained internally by finite differences,\n")
+ msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
msgs += ("\n")
msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
print(msgs) # 1
X0,
)
#
- if self._parameters["ResiduFormula"] in ["Taylor", "NominalTaylor", "NominalTaylorRMS"]:
+ if self._parameters["ResiduFormula"] == "Taylor":
dX1 = float(self._parameters["AmplitudeOfTangentPerturbation"]) * dX0
GradFxdX = Ht( (X0, dX1) )
GradFxdX = numpy.ravel( GradFxdX ).reshape((-1,1))
#
msgs += ("\n")
msgs += (__marge + "We take dX0 = Normal(0,X) and dX = Alpha*dX0. F is the calculation code.\n")
+ if "DifferentialIncrement" in HO and HO["DifferentialIncrement"] is not None:
+ msgs += ("\n")
+ msgs += (__marge + "Reminder: tangent operator is obtained internally by finite differences,\n")
+ msgs += (__marge + "with a differential increment of value %.2e.\n"%HO["DifferentialIncrement"])
msgs += ("\n")
msgs += (__marge + "(Remark: numbers that are (about) under %.0e represent 0 to machine precision)\n"%mpr)
print(msgs) # 1
avoidingRedundancy = __avoidRC,
inputAsMultiFunction = inputAsMF,
extraArguments = self.__extraArgs )
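+ # Keep the finite-difference increment so that checking algorithms can report it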
+ self.__FO["DifferentialIncrement"] = __Function["DifferentialIncrement"]
elif isinstance(__Function, dict) and \
("Direct" in __Function) and ("Tangent" in __Function) and ("Adjoint" in __Function) and \
(__Function["Direct"] is not None) and (__Function["Tangent"] is not None) and (__Function["Adjoint"] is not None):
avoidingRedundancy = __avoidRC,
inputAsMultiFunction = inputAsMF,
extraArguments = self.__extraArgs )
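+ # All three operators are supplied explicitly: no finite-difference increment is involved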
+ self.__FO["DifferentialIncrement"] = None
elif asMatrix is not None:
if isinstance(__Matrix, str):
__Matrix = PlatformInfo.strmatrix2liststr( __Matrix )
avoidingRedundancy = __avoidRC,
inputAsMultiFunction = inputAsMF )
del __matrice
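+ # Matrix-defined operator: derivatives are exact, no finite-difference increment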
+ self.__FO["DifferentialIncrement"] = None
else:
raise ValueError(
"The %s object is improperly defined or undefined,"%self.__name+\
adaopy.setObserver ("CurrentState",Template="ValuePrinter")
adaopy.execute()
del adaopy
+ #
+ for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
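+     # Observation operator for the check: a fixed linear map H from R^3 to R^4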
+     def simulation( arguments ):
+         _X = arguments
+         X = numpy.ravel( _X )
+         H = numpy.array([[1,0,0],[0,2,0],[0,0,3],[1,2,3]])
+         return numpy.dot(H,X)
+     #
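+     # Run the checking algorithm on a 3-component state, with an identity background error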
+     adaopy = adaoBuilder.New()
+     adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "NumberOfRepetition":2, "SetSeed":1000})
+     adaopy.setCheckingPoint (Vector = [0,1,2])
+     adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+     adaopy.setObservationOperator(OneFunction = simulation)
+     adaopy.execute()
+     del adaopy
#===============================================================================
if __name__ == "__main__":