examples/daSalome/Makefile
examples/daSalome/test003_ADAO_JDC_using_scripts.comm
examples/daSalome/test004_ADAO_JDC_using_scripts.comm
- examples/daSalome/test004_ADAO_JDC_Observers_using_scripts.comm
examples/daSalome/test005_ADAO_Operators.comm
examples/daSalome/test003_bis_ADAO_JDC_using_user_data_init.comm
+ examples/daSalome/test006_Observers.comm
examples/daSkeletons/Makefile
examples/daSkeletons/External_data_definition_by_scripts/Makefile
examples/daSkeletons/External_data_definition_by_scripts/ADAO_Case.comm
test003_ADAO_scripts_for_JDC.py \
test004_ADAO_JDC_using_scripts.comm \
test004_ADAO_scripts_for_JDC.py \
- test004_ADAO_JDC_Observers_using_scripts.comm \
test005_ADAO_Operators.comm \
test005_ADAO_scripts_for_JDC.py \
test003_bis_ADAO_JDC_using_user_data_init.comm \
test003_bis_ADAO_user_data_init.py \
- test003_bis_ADAO_scripts_for_JDC.py
+ test003_bis_ADAO_scripts_for_JDC.py \
+ test006_Observers.comm \
+ test006_Observers_observer_with_file.py \
+ test006_Observers_init.py \
+ test006_Observers_Observation_Operator.py \
+ test006_Observers_var.py
examplesdasalome_DATA = ${DATA_INST}
test003_ADAO_scripts_for_JDC.py \
test004_ADAO_JDC_using_scripts.comm.in \
test004_ADAO_scripts_for_JDC.py \
- test004_ADAO_JDC_Observers_using_scripts.comm.in \
test005_ADAO_Operators.comm.in \
test005_ADAO_scripts_for_JDC.py \
test003_bis_ADAO_JDC_using_user_data_init.comm.in \
test003_bis_ADAO_user_data_init.py \
- test003_bis_ADAO_scripts_for_JDC.py
+ test003_bis_ADAO_scripts_for_JDC.py \
+ test006_Observers.comm.in \
+ test006_Observers_observer_with_file.py \
+ test006_Observers_init.py \
+ test006_Observers_Observation_Operator.py \
+ test006_Observers_var.py
+++ /dev/null
-
-ASSIMILATION_STUDY(Study_name='Test',
- Study_repertory='@prefix@/share/salome/adao_examples/daSalome',
- Debug=0,
- Algorithm='Blue',
- Background=_F(INPUT_TYPE='Vector',
- data=_F(FROM='Script',
- SCRIPT_FILE='test004_ADAO_scripts_for_JDC.py',),),
- BackgroundError=_F(INPUT_TYPE='Matrix',
- data=_F(FROM='Script',
- SCRIPT_FILE='test004_ADAO_scripts_for_JDC.py',),),
- Observation=_F(INPUT_TYPE='Vector',
- data=_F(FROM='Script',
- SCRIPT_FILE='test004_ADAO_scripts_for_JDC.py',),),
- ObservationError=_F(INPUT_TYPE='Matrix',
- data=_F(FROM='Script',
- SCRIPT_FILE='test004_ADAO_scripts_for_JDC.py',),),
- ObservationOperator=_F(INPUT_TYPE='Matrix',
- data=_F(FROM='Script',
- SCRIPT_FILE='test004_ADAO_scripts_for_JDC.py',),),
- UserPostAnalysis=_F(FROM='String',
- STRING=
-"""import numpy
-Xa = ADD.get("Analysis").valueserie(-1)
-print
-print "Size of Analysis = %i"%len(Xa)
-print "Min, mean, max = %8.3f, %8.3f, %8.3f"%(min(Xa),numpy.mean(Xa),max(Xa))
-print
-""",),
- Observers=_F(SELECTION='Analysis',
- Analysis_data=_F(NodeType='pyscript',
- Value=
-"""print " ---> Mise en oeuvre de l'observer : affichage de la valeur courante"
-print " var =",var.valueserie(-1)
-print " info =",info
-#
-import Gnuplot
-gp = Gnuplot.Gnuplot()
-gp('set style data lines')
-gp('set title "'+str(info)+'"')
-gp.plot( Gnuplot.Data( var.valueserie(-1) ) )
-global numero
-numero += 1
-filename = "image_%02i.ps"%numero
-print " sauvegarde image %s" % filename
-gp.hardcopy(filename=filename, color=1)
-""",),),);
--- /dev/null
+
+ASSIMILATION_STUDY(Study_name='test_observers',
+ Study_repertory='@prefix@/share/salome/adao_examples/daSalome',
+ Debug=0,
+ Algorithm='3DVAR',
+ Background=_F(INPUT_TYPE='Vector',
+ data=_F(FROM='Script',
+ SCRIPT_FILE='test006_Observers_var.py',),),
+ BackgroundError=_F(INPUT_TYPE='Matrix',
+ data=_F(FROM='Script',
+ SCRIPT_FILE='test006_Observers_var.py',),),
+ Observation=_F(INPUT_TYPE='Vector',
+ data=_F(FROM='Script',
+ SCRIPT_FILE='test006_Observers_var.py',),),
+ ObservationError=_F(INPUT_TYPE='Matrix',
+ data=_F(FROM='Script',
+ SCRIPT_FILE='test006_Observers_var.py',),),
+ ObservationOperator=_F(INPUT_TYPE='Function',
+ data=_F(FROM='FunctionDict',
+ FUNCTIONDICT_FILE='test006_Observers_Observation_Operator.py',),),
+ AlgorithmParameters=_F(INPUT_TYPE='Dict',
+ data=_F(FROM='Script',
+ SCRIPT_FILE='test006_Observers_var.py',),),
+ UserDataInit=_F(INIT_FILE='test006_Observers_init.py',
+ TARGET_LIST=
+ ('Background','BackgroundError','Observation',
+ 'ObservationError','AlgorithmParameters',),),
+ Observers=_F(SELECTION=('CurrentState','CostFunctionJ',),
+ CostFunctionJ_data=_F(NodeType='pyscript',
+ Value=
+"""print " ---> observerCost"
+print " var =",var.valueserie()
+print " info =",info
+#
+import Gnuplot
+import os
+try:
+ numero
+except NameError:
+ numero = 0
+gp = Gnuplot.Gnuplot()
+gp('set style data lines')
+gp('set title "'+str(info)+'"')
+gp.plot( Gnuplot.Data( var.valueserie() ) )
+filename = os.path.join("/tmp", "imageCost_%02i.ps"%numero)
+print " imageCost %s"%filename
+gp.hardcopy(filename=filename, color=1)
+numero += 1
+""",),
+ CurrentState_data=_F(NodeType='userfile',
+ Value='test006_Observers_observer_with_file.py',),),);
--- /dev/null
+#-*-coding:iso-8859-1-*-
+# Copyright (C) 2010-2011 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: André Ribes, andre.ribes@edf.fr, EDF R&D
+
+import numpy
+import time
+import logging
+logging.info("ComputationFunctionNode: Begin")
+# ==============================================================================
+# Input data and parameters: everything is provided in the required input variable
+# "computation", containing for example:
+# {'inputValues': [[[[0.0, 0.0, 0.0]]]],
+# 'inputVarList': ['adao_default'],
+# 'outputVarList': ['adao_default'],
+# 'specificParameters': [{'name': 'method', 'value': 'Direct'}]}
+# ==============================================================================
+#
+# Recovering the type of computation: "Direct", "Tangent" or "Adjoint"
+# --------------------------------------------------------------------
+method = ""
+for param in computation["specificParameters"]:
+ if param["name"] == "method":
+ method = param["value"]
+logging.info("ComputationFunctionNode: Found method is \'%s\'"%method)
+#
+# Recovering the current control state X
+# --------------------------------------
+Xcurrent = computation["inputValues"][0][0][0]
+#
+# Building explicit calculation or requiring external ones
+# --------------------------------------------------------
+dimension = 3
+H = numpy.matrix(numpy.core.identity(dimension))
+#
+def FunctionH( X ):
+ time.sleep(1)
+ return H * X
+#
+def AdjointH( (X, Y) ):
+ return H.T * Y
+#
+# The possible computations
+# -------------------------
+if method == "Direct":
+ logging.info("ComputationFunctionNode: Direct computation")
+ data = FunctionH(numpy.matrix( Xcurrent ).T)
+#
+if method == "Tangent":
+ logging.info("ComputationFunctionNode: Tangent computation")
+ data = FunctionH(numpy.matrix( Xcurrent ).T)
+#
+if method == "Adjoint":
+ logging.info("ComputationFunctionNode: Adjoint computation")
+ Ycurrent = computation["inputValues"][0][0][1]
+ data = AdjointH((numpy.matrix( Xcurrent ).T, numpy.matrix( Ycurrent ).T))
+#
+# Formatting the output
+# ---------------------
+logging.info("ComputationFunctionNode: Formatting the output")
+it = data.flat
+outputValues = [[[[]]]]
+for val in it:
+ outputValues[0][0][0].append(val)
+#
+result = {}
+result["outputValues"] = outputValues
+result["specificOutputInfos"] = []
+result["returnCode"] = 0
+result["errorMessage"] = ""
+#
+logging.info("ComputationFunctionNode: End")
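Note (not part of the patch): the node above dispatches on the "method" entry of the "computation" dictionary documented in its header comment. A minimal standalone sketch of the Direct branch, with a hand-built dictionary (the values are hypothetical, the identity H mirrors the one used above):

import numpy

# Hypothetical input, mimicking what YACS would pass to the node
computation = {
    "inputValues": [[[[1.0, 2.0, 3.0]]]],
    "inputVarList": ["adao_default"],
    "outputVarList": ["adao_default"],
    "specificParameters": [{"name": "method", "value": "Direct"}],
}

# Same dispatch logic as the node body above
method = ""
for param in computation["specificParameters"]:
    if param["name"] == "method":
        method = param["value"]

Xcurrent = computation["inputValues"][0][0][0]
H = numpy.matrix(numpy.identity(3))
if method == "Direct":
    data = H * numpy.matrix(Xcurrent).T
    print("Direct result: %s" % list(data.flat))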
--- /dev/null
+#-*-coding:iso-8859-1-*-
+# Copyright (C) 2010-2011 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: André Ribes, andre.ribes@edf.fr, EDF R&D
+
+import numpy
+
+def FunctionH( X ):
+ return H * X
+
+dimension = 3
+xt = numpy.matrix(numpy.arange(dimension)).T
+Eo = numpy.matrix(numpy.random.normal(0.,1.,size=(dimension,))).T
+Eb = numpy.matrix(numpy.random.normal(0.,1.,size=(dimension,))).T
+H = numpy.matrix(numpy.random.normal(0.,1.,size=(dimension,dimension)))
+xb = xt + Eb
+yo = FunctionH( xt ) + Eo
+xb = xb.A1
+yo = yo.A1
+R = numpy.matrix(numpy.core.identity(dimension)).T
+B = numpy.matrix(numpy.core.identity(dimension)).T
+
+#
+# Definition of the Background as a vector
+# ----------------------------------------
+Background = xb
+#
+# Definition of the Observation as a vector
+# -----------------------------------------
+Observation = yo
+#
+# Definition of the Background Error covariance as a matrix
+# ---------------------------------------------------------
+BackgroundError = B
+#
+# Definition of the Observation Error covariance as a matrix
+# ----------------------------------------------------------
+ObservationError = R
+
+print xb
+print B
+print yo
+print R
+
+#
+# Definition of the init_data dictionary
+# ---------------------------------------
+init_data = {}
+init_data["Background"] = Background
+init_data["Observation"] = Observation
+init_data["BackgroundError"] = BackgroundError
+init_data["ObservationError"] = ObservationError
+
+# Algorithm Parameters
+init_data["AlgorithmParameters"] = {"Minimizer":"LBFGSB","MaximumNumberOfSteps":5}
--- /dev/null
+print " ---> observerState"
+print " var =",var.valueserie(-1)
+print " info =",info
+#
+import Gnuplot
+import os
+
+try:
+ numero
+except NameError:
+ numero = 0
+
+gp = Gnuplot.Gnuplot()
+gp('set style data lines')
+gp('set title "'+str(info)+'"')
+gp.plot( Gnuplot.Data( var.valueserie(-1) ) )
+
+filename = os.path.join("/tmp", "imageState_%02i.ps"%numero)
+print " imageState \"%s\""%filename
+
+gp.hardcopy(filename=filename, color=1)
+numero += 1
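Note (not part of the patch): the observer file above assumes the Python Gnuplot module is installed. Since ADAO runs such a file with "var" and "info" already bound in its namespace (see the execfile(script) call in the ObservationNodeFile node later in this patch), a plotting-free variant is equally valid; a minimal hypothetical sketch:

# Minimal observer body: "var" and "info" are provided by ADAO at call time
import os
print(" ---> observerState (%s)" % info)
print("      last value = %s" % var.valueserie(-1))
# Append the value to a plain text log instead of plotting it
logfile = open(os.path.join("/tmp", "observerState.log"), "a")
logfile.write("%s %s\n" % (info, var.valueserie(-1)))
logfile.close()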
--- /dev/null
+#-*-coding:iso-8859-1-*-
+# Copyright (C) 2010-2011 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: André Ribes, andre.ribes@edf.fr, EDF R&D
+
+import numpy
+#
+# Definition of the Background as a vector
+# ----------------------------------------
+Background = init_data["Background"]
+#
+# Definition of the Observation as a vector
+# -----------------------------------------
+Observation = init_data["Observation"]
+#
+# Definition of the Background Error covariance as a matrix
+# ---------------------------------------------------------
+BackgroundError = init_data["BackgroundError"]
+#
+# Definition of the Observation Error covariance as a matrix
+# ----------------------------------------------------------
+ObservationError = init_data["ObservationError"]
+#
+# Definition of the Algorithm Parameters as a dictionary
+# -------------------------------------------------------
+AlgorithmParameters = init_data["AlgorithmParameters"]
for observer_name in observers.keys():
scheduler = ""
info = ""
+ number = str(observers[observer_name]["number"])
if "scheduler" in observers[observer_name].keys():
scheduler = observers[observer_name]["scheduler"]
if "info" in observers[observer_name].keys():
- scheduler = observers[observer_name]["info"]
- assim_study.addObserver(observer_name, scheduler, info)
+ info = observers[observer_name]["info"]
+ assim_study.addObserver(observer_name, scheduler, info, number)
Study = assim_study
]]>
<inline name="ExtractDataNode">
<script><code><![CDATA[
import pickle
+from daCore.AssimilationStudy import AssimilationStudy
print "Entering in ExtractData"
var = None
info = None
if param["name"] == "var":
var = pickle.loads(param["value"])
if param["name"] == "info":
- info = pickle.loads(param["value"])
+ info = param["value"]
]]></code></script>
<inport name="data" type="SALOME_TYPES/ParametricInput"/>
<outport name="var" type="pyobj"/>
<inline name="ObservationNodeFile">
<script><code><![CDATA[
print "Entering in Observation"
-# Get file path and filename
-import sys
-import os
-filepath = os.path.dirname(script)
-filename = os.path.basename(script)
-module_name = os.path.splitext(filename)[0]
-sys.path.insert(0,filepath)
-# Import script
-__import__(module_name)
+execfile(script)
]]></code></script>
<inport name="var" type="pyobj"/>
def __init__(self, optim_algo):
self.optim_algo = optim_algo
- # Gestion du compteur
- self.sample_counter = 0
- self.counter_lock = threading.Lock()
-
def create_sample(self, data, method):
sample = pilot.StructAny_New(self.optim_algo.runtime.getTypeCode('SALOME_TYPES/ParametricInput'))
method_name.setEltAtRank("name", "method")
method_name.setEltAtRank("value", method)
specificParameters.pushBack(method_name)
+ print self.optim_algo.has_observer
+ if self.optim_algo.has_observer:
+ obs_switch = pilot.StructAny_New(self.optim_algo.runtime.getTypeCode('SALOME_TYPES/Parameter'))
+ obs_switch.setEltAtRank("name", "switch_value")
+ obs_switch.setEltAtRank("value", "1")
+ specificParameters.pushBack(obs_switch)
sample.setEltAtRank("specificParameters", specificParameters)
# The data
return matrix
def Direct(self, X, sync = 1):
- #print "Call Direct OptimizerHooks"
+ print "Call Direct OptimizerHooks"
if sync == 1:
# 1: Get a unique sample number
- self.counter_lock.acquire()
- self.sample_counter += 1
- local_counter = self.sample_counter
+ self.optim_algo.counter_lock.acquire()
+ self.optim_algo.sample_counter += 1
+ local_counter = self.optim_algo.sample_counter
# 2: Put sample in the job pool
sample = self.create_sample(X, "Direct")
# 5: Release lock
# Have to be done before but need a new implementation
# of the optimizer loop
- self.counter_lock.release()
+ self.optim_algo.counter_lock.release()
return Y
else:
#print "sync false is not yet implemented"
#print "Call Tangent OptimizerHooks"
if sync == 1:
# 1: Get a unique sample number
- self.counter_lock.acquire()
- self.sample_counter += 1
- local_counter = self.sample_counter
+ self.optim_algo.counter_lock.acquire()
+ self.optim_algo.sample_counter += 1
+ local_counter = self.optim_algo.sample_counter
# 2: Put sample in the job pool
sample = self.create_sample(X, "Tangent")
# 5: Release lock
# Have to be done before but need a new implementation
# of the optimizer loop
- self.counter_lock.release()
+ self.optim_algo.counter_lock.release()
return Y
else:
#print "sync false is not yet implemented"
#print "Call Adjoint OptimizerHooks"
if sync == 1:
# 1: Get a unique sample number
- self.counter_lock.acquire()
- self.sample_counter += 1
- local_counter = self.sample_counter
+ self.optim_algo.counter_lock.acquire()
+ self.optim_algo.sample_counter += 1
+ local_counter = self.optim_algo.sample_counter
# 2: Put sample in the job pool
sample = self.create_sample((X,Y), "Adjoint")
# 5: Release lock
# Have to be done before but need a new implementation
# of the optimizer loop
- self.counter_lock.release()
+ self.optim_algo.counter_lock.release()
return Z
else:
#print "sync false is not yet implemented"
SALOMERuntime.OptimizerAlgASync.__init__(self, None)
self.runtime = SALOMERuntime.getSALOMERuntime()
+ self.has_observer = False
+
+ # Counter management
+ self.sample_counter = 0
+ self.counter_lock = threading.Lock()
+
# Define the input and output types for the computation code
self.tin = self.runtime.getTypeCode("SALOME_TYPES/ParametricInput")
self.tout = self.runtime.getTypeCode("SALOME_TYPES/ParametricOutput")
# Set Observers
for observer_name in self.da_study.observers_dict.keys():
- if observers_dict[observer_name]["scheduler"] != "":
- self.ADD.setDataObserver(observer_name, HookFunction=self.obs, Scheduler = observers_dict[observer_name]["scheduler"], HookParameters = observer_name)
+ print "observer %s found" % observer_name
+ self.has_observer = True
+ if self.da_study.observers_dict[observer_name]["scheduler"] != "":
+ self.ADD.setDataObserver(observer_name, HookFunction=self.obs, Scheduler = self.da_study.observers_dict[observer_name]["scheduler"], HookParameters = observer_name)
else:
self.ADD.setDataObserver(observer_name, HookFunction=self.obs, HookParameters = observer_name)
# Start Assimilation Study
- #print "ADD analyze"
+ print "ADD analyze"
self.ADD.analyze()
# Assimilation Study is finished
self.pool.destroyAll()
def obs(self, var, info):
- print "Hook observer called with:"
- print "var %s" % var
- print "inof %s" % info
+ print "Call observer %s" % info
+ sample = pilot.StructAny_New(self.runtime.getTypeCode('SALOME_TYPES/ParametricInput'))
+
+ # Placeholder input data: required by the ParametricInput structure, not used by the observer node
+ inputVarList = pilot.SequenceAny_New(self.runtime.getTypeCode("string"))
+ outputVarList = pilot.SequenceAny_New(self.runtime.getTypeCode("string"))
+ inputVarList.pushBack("a")
+ outputVarList.pushBack("a")
+ sample.setEltAtRank("inputVarList", inputVarList)
+ sample.setEltAtRank("outputVarList", outputVarList)
+ variable = pilot.SequenceAny_New(self.runtime.getTypeCode("double"))
+ variable_sequence = pilot.SequenceAny_New(variable.getType())
+ state_sequence = pilot.SequenceAny_New(variable_sequence.getType())
+ time_sequence = pilot.SequenceAny_New(state_sequence.getType())
+ variable.pushBack(1.0)
+ variable_sequence.pushBack(variable)
+ state_sequence.pushBack(variable_sequence)
+ time_sequence.pushBack(state_sequence)
+ sample.setEltAtRank("inputValues", time_sequence)
+
+ # Pass the observer values through the specific parameters
+ specificParameters = pilot.SequenceAny_New(self.runtime.getTypeCode("SALOME_TYPES/Parameter"))
+
+ # Switch Value
+ obs_switch = pilot.StructAny_New(self.runtime.getTypeCode('SALOME_TYPES/Parameter'))
+ obs_switch.setEltAtRank("name", "switch_value")
+ obs_switch.setEltAtRank("value", self.da_study.observers_dict[info]["number"])
+ specificParameters.pushBack(obs_switch)
+
+ # Var
+ var_struct = pilot.StructAny_New(self.runtime.getTypeCode('SALOME_TYPES/Parameter'))
+ var_struct.setEltAtRank("name", "var")
+
+ # Remove the data observer so that the variable can be pickled
+ var.removeDataObserver(self.obs)
+ # Pickle the variable
+ var_str = pickle.dumps(var)
+ # Re-register the data observer afterwards
+ if self.da_study.observers_dict[info]["scheduler"] != "":
+ self.ADD.setDataObserver(info, HookFunction=self.obs, Scheduler = self.da_study.observers_dict[info]["scheduler"], HookParameters = info)
+ else:
+ self.ADD.setDataObserver(info, HookFunction=self.obs, HookParameters = info)
+ var_struct.setEltAtRank("value", var_str)
+ specificParameters.pushBack(var_struct)
+
+ # Info
+ info_struct = pilot.StructAny_New(self.runtime.getTypeCode('SALOME_TYPES/Parameter'))
+ info_struct.setEltAtRank("name", "info")
+ info_struct.setEltAtRank("value", self.da_study.observers_dict[info]["info"])
+ specificParameters.pushBack(info_struct)
+
+ sample.setEltAtRank("specificParameters", specificParameters)
+
+ self.counter_lock.acquire()
+ self.sample_counter += 1
+ local_counter = self.sample_counter
+ self.pool.pushInSample(local_counter, sample)
+
+ # Wait until this sample has been picked up and processed
+ import sys, traceback
+ try:
+ while 1:
+ self.signalMasterAndWait()
+ if self.isTerminationRequested():
+ self.pool.destroyAll()
+ else:
+ # Get current Id
+ sample_id = self.pool.getCurrentId()
+ if sample_id == local_counter:
+ # 5: Release lock
+ # Have to be done before but need a new implementation
+ # of the optimizer loop
+ self.counter_lock.release()
+ break
+ except:
+ print "Exception in user code:"
+ print '-'*60
+ traceback.print_exc(file=sys.stdout)
+ print '-'*60
def getAlgoResult(self):
#print "getAlgoResult"
self.ADD.prepare_to_pickle()
+ # Remove the data observers: the assimilation study object cannot be pickled while they are attached
+ for observer_name in self.da_study.observers_dict.keys():
+ self.ADD.removeDataObserver(observer_name, self.obs)
result = pickle.dumps(self.da_study)
return result
self.ADD.setAlgorithm(choice=self.algorithm)
if self.algorithm_dict != None:
+ print self.algorithm_dict
self.ADD.setAlgorithmParameters(asDico=self.algorithm_dict)
def getAssimilationStudy(self):
elif self.ObservationOperatorType[Name] == "Function":
self.FunctionObservationOperator[Name] = ObservationOperator
- def addObserver(self, name, scheduler, info):
+ def addObserver(self, name, scheduler, info, number):
self.observers_dict[name] = {}
self.observers_dict[name]["scheduler"] = scheduler
self.observers_dict[name]["info"] = info
+ self.observers_dict[name]["number"] = number
def getObservers(self):
return self.observers_dict
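For reference, a sketch (not part of the patch) of how the generated code shown earlier now drives these two methods; it assumes an already-built assim_study, and the observer names, numbers and info strings below are hypothetical, while the dictionary layout matches the one produced by the generator:

observers = {
    "CurrentState":  {"number": 1, "info": "CurrentState"},
    "CostFunctionJ": {"number": 2, "info": "CostFunctionJ", "scheduler": ""},
}
for observer_name in observers.keys():
    scheduler = observers[observer_name].get("scheduler", "")
    info = observers[observer_name].get("info", "")
    number = str(observers[observer_name]["number"])
    assim_study.addObserver(observer_name, scheduler, info, number)
print(assim_study.getObservers())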