.. include:: snippets/GradientNormTolerance.rst
+.. include:: snippets/InitializationPoint.rst
+
.. include:: snippets/MaximumNumberOfSteps.rst
.. include:: snippets/Minimizer_xDVAR.rst
.. include:: snippets/GradientNormTolerance.rst
+.. include:: snippets/InitializationPoint.rst
+
.. include:: snippets/MaximumNumberOfSteps.rst
.. include:: snippets/Minimizer_xDVAR.rst
--- /dev/null
+.. index:: single: InitializationPoint
+
+InitializationPoint
+ *Vector*. The variable specifies one vector to be used as the initial state
+ around which an iterative algorithm starts. By default, this initial state is
+ not required and is equal to the background :math:`\mathbf{x}^b`. If
 provided, it replaces the background only for initialization. Its value must
 make it possible to build a vector of the same size as the background.
.. include:: snippets/GradientNormTolerance.rst
+.. include:: snippets/InitializationPoint.rst
+
.. include:: snippets/MaximumNumberOfSteps.rst
.. include:: snippets/Minimizer_xDVAR.rst
.. include:: snippets/GradientNormTolerance.rst
+.. include:: snippets/InitializationPoint.rst
+
.. include:: snippets/MaximumNumberOfSteps.rst
.. include:: snippets/Minimizer_xDVAR.rst
--- /dev/null
+.. index:: single: InitializationPoint
+
+InitializationPoint
+ *Vecteur*. La variable désigne un vecteur à utiliser comme l'état initial
+ autour duquel démarre un algorithme itératif. Par défaut, cet état initial
+ n'a pas besoin d'être fourni et il est égal à l'ébauche :math:`\mathbf{x}^b`.
+ Dans le cas où il est fourni, il ne remplace l'ébauche que pour
+ l'initialisation. Sa valeur doit permettre de construire un vecteur de taille
+ identique à l'ébauche.
name = "Bounds",
message = "Liste des valeurs de bornes",
)
+ self.defineRequiredParameter(
+ name = "InitializationPoint",
+ typecast = numpy.ravel,
+ message = "État initial imposé (par défaut, c'est l'ébauche si None)",
+ )
self.requireInputArguments(
mandatory= ("Xb", "Y", "HO", "R", "B" ),
)
GradJ = numpy.asmatrix( numpy.ravel( GradJb ) + numpy.ravel( GradJo ) ).T
return GradJ.A1
#
- # Point de démarrage de l'optimisation : Xini = Xb
+ # Point de démarrage de l'optimisation
# ------------------------------------
- Xini = numpy.ravel(Xb)
+ if self._parameters["InitializationPoint"] is not None:
+ __ipt = numpy.ravel(self._parameters["InitializationPoint"])
+ if __ipt.size != numpy.ravel(Xb).size:
+ raise ValueError("Incompatible size %i of forced initial point to replace the Xb of size %i" \
+ %(__ipt.size,numpy.ravel(Xb).size))
+ else:
+ Xini = __ipt
+ else:
+ Xini = numpy.ravel(Xb)
#
# Minimisation de la fonctionnelle
# --------------------------------
name = "Bounds",
message = "Liste des valeurs de bornes",
)
+ self.defineRequiredParameter(
+ name = "InitializationPoint",
+ typecast = numpy.ravel,
+ message = "État initial imposé (par défaut, c'est l'ébauche si None)",
+ )
self.requireInputArguments(
mandatory= ("Xb", "Y", "HO", "EM", "R", "B" ),
optional = ("U", "CM"),
GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
return GradJ
#
- # Point de démarrage de l'optimisation : Xini = Xb
+ # Point de démarrage de l'optimisation
# ------------------------------------
- if isinstance(Xb, type(numpy.matrix([]))):
- Xini = Xb.A1.tolist()
+ if self._parameters["InitializationPoint"] is not None:
+ __ipt = numpy.ravel(self._parameters["InitializationPoint"])
+ if __ipt.size != numpy.ravel(Xb).size:
+ raise ValueError("Incompatible size %i of forced initial point to replace the Xb of size %i" \
+ %(__ipt.size,numpy.ravel(Xb).size))
+ else:
+ Xini = __ipt
else:
- Xini = list(Xb)
+ Xini = numpy.ravel(Xb)
#
# Minimisation de la fonctionnelle
# --------------------------------
Parameters = None,
Script = None):
"Mise a jour d'un concept de calcul"
- if "AlgorithmParameters" not in self.__adaoObject or self.__adaoObject["AlgorithmParameters"] is None:
+ Concept = "AlgorithmParameters"
+ if Concept not in self.__adaoObject or self.__adaoObject[Concept] is None:
raise ValueError("\n\nNo algorithm registred, set one before updating parameters or executing\n")
- self.__adaoObject["AlgorithmParameters"].updateParameters(
+ self.__adaoObject[Concept].updateParameters(
asDict = Parameters,
asScript = self.__with_directory(Script),
)
+ # RaJ du register
return 0
def setRegulationParameters(self,