--- /dev/null
+ierr = psspy.add_details_to_opf_log(1)
+ierr = psspy.produce_opf_log_file(1,r'C:\Users\j15773\Documents\GTDosier\PSEN\Versions\PSEN_V14 - ec dispatch\Example\Results\LOG.log')
\ No newline at end of file
--- /dev/null
+# -*- coding: cp1252 -*-
+#===============================================================================
+# PSEN SCRIPT FOR PROBABILISTIC STUDIES OF ELECTRICAL NETWORKS
+#===============================================================================
+from pylab import *
+from math import *
+import os, random, sys, copy, multiprocessing
+import subprocess #used below to launch read_pfd_wrapper.py
+import pickle #used below to load the 'param_base' file
+import numpy as np
+import time
+from array import *
+import PSENconfig #file with Eficas output dictionaries
+from support_functionsPF import *
+import shutil
+import pdb
+import csv
+
+from openturns import * #uncomment afterwards
+InitializeDispatchGentoP0 = False
+# Debug = False
+Debug = True
+if __name__ == '__main__':
+    start_total = time.perf_counter()
+    start = time.perf_counter()
+
+
+ if Debug:
+        cmd_Path=os.getcwd()+r'\usrCmdPF.py' #launched from pssewrapper.py
+        #cmd_Path=os.getcwd()+'\PSEN\usrCmd.py' #launched from qteficas_psen.py
+ else:
+ cmd_Path=os.path.join(os.path.dirname(os.path.abspath(__file__)),"usrCmdPF.py")
+        ##cmd_Path=os.getcwd()+'\EficasV1\PSEN_Eficas\PSEN\usrCmd.py' #launch with the .bat
+#===============================================================================
+# User data retrieval
+#===============================================================================
+ #extract laws from Eficas Output
+ Paths = PSENconfig.Dico['DIRECTORY']
+ SimuParams = PSENconfig.Dico['SIMULATION']
+ PFParams = PSENconfig.Dico['PF_PARAMETERS']
+
+    if 'CORRELATION' in PSENconfig.Dico: #build the list of law names
+ LawNames = RemoveListfromString(PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][0])
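+    # Expected Eficas layout for 'CorrelationMatrix' (see the sample PSENconfig
+    # Dico): entry 0 is the stringified list of law names and the remaining
+    # entries are the stringified matrix rows, e.g. ["['load']", '[1.0]'].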
+ Laws = {}
+ NonActiveIndices = []
+ TSindices = []
+ for key in PSENconfig.Dico.keys():
+ if key[0:12] == 'DISTRIBUTION':
+ shortkey = key[12:]
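+            # e.g. the key 'DISTRIBUTIONload' yields the shortkey 'load'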
+ if PSENconfig.Dico[key]['Activated']==True: #only take into account laws which are "activated"
+ Laws[shortkey]= PSENconfig.Dico[key]
+                if Laws[shortkey]['Law'] in ('PDF_from_file','TimeSeries_from_file'): #read contents of the .csv file
+                    g=open(Laws[shortkey]['FileName'],"r")
+                    lines=g.readlines()
+                    g.close()
+                    Laws[shortkey]['FileContents']=lines
+                    if Laws[shortkey]['Law']=='TimeSeries_from_file' and 'CORRELATION' in PSENconfig.Dico:
+                        TSindices.append(LawNames.index(shortkey))
+ if isinstance(Laws[shortkey][Laws[shortkey]['ComponentType']],str):
+ Laws[shortkey][Laws[shortkey]['ComponentType']]=[Laws[shortkey][Laws[shortkey]['ComponentType']]] #if only one entry, create list
+ if 'TF_Input' in Laws[shortkey]: #If user inputted transfer function
+ Laws[shortkey]['TransferFunction']=True
+ else:
+ Laws[shortkey]['TransferFunction']=False
+ else:
+ if 'CORRELATION' in PSENconfig.Dico:
+ NonActiveIndices.append(LawNames.index(shortkey))
+
+ if 'CORRELATION' in PSENconfig.Dico:
+ #Treat Correlation Matrix - eliminate non-activated laws
+ CorrMatrix0 = {}
+ LawNames2 = []
+
+ for i, lawname in enumerate(LawNames):
+ if i not in NonActiveIndices:
+ LawNames2.append(lawname)
+ Cmax = PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][1:]
+ CMax = []
+ for i,c in enumerate(Cmax):
+ if i not in NonActiveIndices:
+ c = RemoveListfromString(c)
+                c = list(map(float,c))
+ c2 = []
+ for ind, c_el in enumerate(c):
+ if ind not in NonActiveIndices:
+ ## c2.append(c_el)
+
+ #if time series, don't correlate other laws with the value "1".
+ if (ind not in TSindices) and (i not in TSindices):
+ c2.append(c_el)
+ elif i==ind:
+ c2.append(1.)
+ else:
+ c2.append(0.)
+ CMax.append(c2)
+
+ CorrMatrix0['matrix'] = np.array(CMax)
+ CorrMatrix0['laws'] = LawNames2
+
+ else: #acceptable only if all active distributions are time series or if only 1 active distribution
+
+ if len(Laws)==1: #create correlation matrix of 1 x 1
+ CorrMatrix0 = {}
+ CorrMatrix0['matrix'] = np.array([[1]])
+            CorrMatrix0['laws'] = list(Laws.keys())
+ else: #>1 law, test if all TS
+ allTS=True
+ for key in Laws.keys():
+ if Laws[key]['Law']!='TimeSeries_from_file':
+ allTS=False
+ if allTS:
+ CorrMatrix0 = {}
+ CorrMatrix0['matrix']=np.eye(len(Laws))
+                CorrMatrix0['laws']=list(Laws.keys())
+ else:
+                print ("Error: Correlation matrix must be defined. Enter 0's for correlations between laws and time series.")
+ sys.exit(1)
+
+    #Duplicate laws when one law is defined for multiple components and each component should be sampled independently:
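+    # Example (cf. the sample PSENconfig): a 'load' law with sampling
+    # 'One sample per load' over ['CITYB30__Lo1', 'CITYC30__Lo1'] is replaced
+    # by 'load_CITYB30__Lo1' and 'load_CITYC30__Lo1', each sampled separately.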
+ isDuplicateLaws = False
+ for law in list(Laws.keys()):
+ if 'One sample per ' in Laws[law]['Sampling']:
+ isDuplicateLaws = True
+ ComponentType = Laws[law]['ComponentType']
+ ComponentList = Laws[law][ComponentType]
+ for component in ComponentList:
+ lawname = law + "_" + component
+ Laws[lawname]=Laws[law].copy() #make a copy of the law
+ Laws[lawname][ComponentType]=[component] #apply law to only one component, not whole list
+ del Laws[law]
+ else: #one sample for all components defined by law
+ i = CorrMatrix0['laws'].index(law)
+ if CorrMatrix0['matrix'][i][i] != 1:
+ print( 'Error: Correlation must be 1 between law and itself for law with same sample for all components. (' + law + ')')
+ sys.exit(1)
+
+    #re-treat the correlation matrix: expand it to cover the duplicated laws
+ if isDuplicateLaws:
+ CorrMatrix = {}
+        CorrMatrix['laws']=list(Laws.keys())
+        CorrMatrix['matrix']=np.eye(len(Laws))
+ for x,lawname1 in enumerate(Laws.keys()):
+ for i,lawname1_0 in enumerate(CorrMatrix0['laws']):
+ if lawname1_0 in lawname1:
+ break
+ for y, lawname2 in enumerate(Laws.keys()):
+ for j,lawname2_0 in enumerate(CorrMatrix0['laws']):
+ if lawname2_0 in lawname2:
+ break
+ if x!=y:
+ CorrMatrix['matrix'][x][y] = CorrMatrix0['matrix'][i][j]
+ CorrMatrix['matrix'][y][x] = CorrMatrix0['matrix'][j][i]
+
+ else:
+ CorrMatrix = CorrMatrix0
+ #retest for positive definiteness
+ if not np.all(np.linalg.eigvals(CorrMatrix['matrix'])>0):
+ print ('Error: Correlation matrix is not positive definite.')
+ sys.exit(1)
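+    # Quick sanity check of the test above: for two laws with correlation 0.9,
+    # [[1.0, 0.9], [0.9, 1.0]] has eigenvalues 1.9 and 0.1 (both > 0) and passes;
+    # a correlation of 1.0 between two distinct laws gives an eigenvalue of 0 and fails.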
+ #execution file name
+ exec_file="report.txt"
+
+    # Treat contingency files entered as CSVs
+ LinesList = []
+ GeneratorsList = []
+ LoadsList = []
+ TransformersList = []
+ MotorsList = []
+
+ if 'N_1_LINES' in PSENconfig.Dico:
+ if PSENconfig.Dico['N_1_LINES']['Activated']==True:
+ LinesList = PSENconfig.Dico['N_1_LINES']['Probability']
+ if 'N_1_GENERATORS' in PSENconfig.Dico:
+ if PSENconfig.Dico['N_1_GENERATORS']['Activated']==True:
+ GeneratorsList = PSENconfig.Dico['N_1_GENERATORS']['Probability']
+ if 'N_1_LOADS' in PSENconfig.Dico:
+ if PSENconfig.Dico['N_1_LOADS']['Activated']==True:
+ LoadsList = PSENconfig.Dico['N_1_LOADS']['Probability']
+ if 'N_1_TRANSFORMERS' in PSENconfig.Dico:
+ if PSENconfig.Dico['N_1_TRANSFORMERS']['Activated']==True:
+ TransformersList = PSENconfig.Dico['N_1_TRANSFORMERS']['Probability']
+ if 'N_1_MOTORS' in PSENconfig.Dico:
+ if PSENconfig.Dico['N_1_MOTORS']['Activated']==True:
+ MotorsList = PSENconfig.Dico['N_1_MOTORS']['Probability']
+
+    try :
+        continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb = config_contingency(LinesList,GeneratorsList,TransformersList,LoadsList,MotorsList)
+    except IOError : # handle the exception if the file is not in the right format
+        nb_lines=1
+        print ('Error with contingency input file')
+
+ if len(continVal)>0:
+ N_1_fromFile = True
+ else:
+ N_1_fromFile = False
+
+    # Build the N-1 folder-name variable
+ if N_1_fromFile == True :
+ folderN_1 = '1_'
+ else :
+ folderN_1 = '_'
+
+
+    # Define the variables for the time series
+
+ time_serie_flag=[]
+ time_serie_mat=[]
+ time_serie_time=[]
+ timeVect = []
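+    # Each time-series .csv line is expected as 'timestamp;value'; commaToPoint
+    # converts a decimal comma to a point and malformed lines are skipped.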
+ for i,key in enumerate(CorrMatrix['laws']) :
+ if Laws[key]['Law']=='TimeSeries_from_file':
+ linesTS = Laws[key]['FileContents']
+ time_serie = 1 #raise the flag time_serie
+ tsm=[]
+ tVect=[]
+ for j in range (len(linesTS)) :
+ try:
+ tsm.append(float(commaToPoint(linesTS[j].split(';')[1])))
+ tVect.append(linesTS[j].split(';')[0])
+ except :
+ pass
+ time_serie_time.append(tVect)
+ time_serie_flag.append(1)
+ time_serie_mat.append(tsm)
+ else:
+ time_serie_flag.append(-1)
+ if N_1_fromFile==True:
+ time_serie_flag.append(-1)
+
+ #find shortest time series column
+ try:
+ time_serie
+ timeVect = time_serie_time[0]
+ for index, tV in enumerate(time_serie_time):
+ if len(tV) < len(timeVect):
+ timeVect = tV
+ except NameError:
+ pass
+
+    #replace the time vector with iteration numbers (easier for post-processing)
+ N = len(timeVect)
+ timeVect = range(1, N+1)
+
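+    # Transpose: time_serie_mat held one row per law; after zip(*...) each row
+    # is one time point holding a value per time-series law.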
+ time_serie_mat=list(zip(*time_serie_mat))
+
+ # Probabilistic Study: central dispersion => Monte Carlo or LHS iterations
+ if 'NUMBER_PACKAGE' in SimuParams:
+ nb_fix = int(SimuParams['NUMBER_PACKAGE'])
+ elif 'CONVERGENCE' in SimuParams:
+ if SimuParams['CONVERGENCE']==1:
+ nb_fix=0
+ else:
+ nb_fix=100
+ print ('\nALERT:\nConvergence not selected, and no number of packages chosen: default number= 100')
+ time.sleep(2)
+ #Extension name for the folders and files
+ day=time.strftime("%Y%m%d", time.gmtime())
+ hour=time.strftime("%Hh%Mm%S", time.gmtime())
+    # Record the simulation start time
+ f=open(exec_file, 'a')
+    start_time=time.perf_counter()
+    f.write("Starting time: %f; Monte Carlo size: %f; " % (start_time, SimuParams["SIZE_PACKAGE"]))
+ f.close()
+
+ try:
+ time_serie
+ except NameError:
+ num_cores=multiprocessing.cpu_count()-1
+ num_cores=1#Valentin
+ else:
+ num_cores=multiprocessing.cpu_count()
+ num_cores=1#Valentin
+
+ # Initialize the big folder
+ pathBigFolder = Paths['results_folder']+"/N"+folderN_1+day+"_"+hour
+ if not os.path.exists(pathBigFolder): os.makedirs(pathBigFolder)
+
+ #folder=Paths['results_folder']+"/N"+folderN_1+day #big folder
+ for j in range(num_cores):
+ # Initialize a folder per core
+ pathSmallFolder = pathBigFolder+'\package'+str(j)+"_N"+folderN_1+day+"_"+hour
+ if not os.path.exists(pathSmallFolder): os.makedirs(pathSmallFolder)
+
+
+ path_save = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
+ filew = open('temp1.txt', 'w')
+    filew.write(path_save + '\n') # save the working path
+ filew.close()
+    stop = time.perf_counter(); print('PSENConfig processing in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+ Python3_path=PSENconfig.Dico['DIRECTORY']['Python3_path']
+    lancer = [Python3_path + '/python.exe',os.path.dirname(os.path.realpath(__file__))+ '/read_pfd_wrapper.py'] # set the path to the Python3 executable
+ proc = subprocess.Popen(lancer)
+ proc.wait()
+    stop = time.perf_counter(); print('run read_pfd_wrapper.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+
+
+ with open('param_base', 'rb') as fichier:
+ mon_depickler = pickle.Unpickler(fichier)
+ all_inputs_init= mon_depickler.load()
+ os.remove('param_base')
+ buses_base=all_inputs_init[0]
+ lines_base=all_inputs_init[1]
+ trans_base=all_inputs_init[2]
+ plants_base=all_inputs_init[3]
+ loads_base=all_inputs_init[4]
+ shunt_base=all_inputs_init[5]
+ motors_base=all_inputs_init[6]
+ trans3_base=all_inputs_init[7]
+ swshunt_base=all_inputs_init[8]
+
+
+########///////////////////////////////////////////////////////////##########
+ # Initialize size output
+ sizeY0=len(plants_base) #np.matrix(plants_base).shape[0]
+ sizeY1=len(buses_base) #np.matrix(buses_base).shape[0]
+ sizeY2=len(lines_base) #np.matrix(lines_base).shape[0]
+ sizeY3=len(loads_base) #np.matrix(loads_base).shape[0]
+ sizeY4=len(shunt_base) #np.matrix(shunt_base).shape[0]
+ sizeY5=len(trans_base) #np.matrix(trans_base).shape[0]
+ sizeY6=len(motors_base) #np.matrix(motors_base).shape[0]
+ sizeY7=len(trans3_base)
+ sizeY8=len(swshunt_base) #np.matrix(shunt_base).shape[0]
+ sizeY=[sizeY0,sizeY1,sizeY2,sizeY5,sizeY7,sizeY3,sizeY6,sizeY4,sizeY8]
+ sizeOutput=sizeY2
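+    # sizeY is ordered to match the output column groups written below:
+    # machines, buses, lines, 2-winding transformers, 3-winding transformers,
+    # loads, motors, fixed shunts, switched shunts.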
+
+ # Initialize the logger : write the headers
+ entete = ""
+ unit = ""
+ for key in CorrMatrix['laws']:
+ if Laws[key]['ComponentType']=='Generator':
+ if Laws[key]['Type']=='Generator Unavailability':
+ entete+="X:genStatus" + key + ";"
+ unit += ";"
+ else:
+ entete+="X:Gen" + key + "(%Pnom);"
+ unit += "%Pnom;"
+ elif Laws[key]['ComponentType']=='Load':
+ if Laws[key]['Type']=='Load Unavailability':
+ entete+="X:loadStatus" + key + ";"
+ unit += ";"
+ else:
+ entete+="X:Load" + key + "(p.u.);"
+ unit += "p.u.;"
+ elif Laws[key]['ComponentType']=='Line':
+ entete+="X:lineStatus" + key + ";"
+ unit += ";"
+ elif Laws[key]['ComponentType']=='Transformer':
+ entete+="X:transfoStatus" + key + ";"
+ unit += ";"
+ elif Laws[key]['ComponentType']=='Motor':
+ entete+="X:motorStatus" + key + ";"
+ unit += ";"
+ if N_1_fromFile==True:
+ entete += "X:N-1;"
+ unit += "component disconnected;"
+ entete2=entete + ";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;Y:GensDisconnected;;"
+ if PFParams['ALGORITHM']=='Optimum Power Flow':
+ entete += ";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;;"
+
+ unit2= unit + ';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;[(bus, id),...];;'
+ if PFParams['ALGORITHM']=='Optimum Power Flow':
+ unit += ';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;;'
+
+ string = "Iteration;;" + entete
+ unitstring = "Num;;" + unit
+ string2 = "Iteration;;" + entete2
+ unitstring2 = "Num;;" + unit2
+
+ logCSVfilename=[]
+ logCSVfilename_UC=[]
+ for i in range(num_cores):
+ logCSVfilename.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_"+hour+".csv") # Name of the file : global variable
+ logCSVfilename_UC.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_beforeUC_"+hour+".csv") # Name of the file : global variable
+ f = open(logCSVfilename[i], "a")
+ f2 = open(logCSVfilename_UC[i], "a")
+
+ f.write(string)
+ f2.write(string2)
+
+ # Names of the Output variables with the bus number
+ for name in range (sizeY0):
+ f.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
+ f2.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
+ for name in range (sizeY0):
+ f.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
+ f2.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
+ for name in range (sizeY1):
+ f.write("Y:VBus"+str(buses_base[name][0])+";")
+ f2.write("Y:VBus"+str(buses_base[name][0])+";")
+ for name in range (sizeY2):
+ f.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ f2.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ for name in range (sizeY2):
+ f.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ f2.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ for name in range (sizeY2):
+ f.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ f2.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ for name in range (sizeY5):
+ f.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ f2.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ for name in range (sizeY5):
+ f.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ f2.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ for name in range (sizeY5):
+ f.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ f2.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+
+ for name in range (sizeY7):
+ f.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ f2.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ for name in range (sizeY7):
+ f.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ f2.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ for name in range (sizeY7):
+ f.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ f2.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ for name in range (sizeY3):
+ f.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
+ f2.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
+ for name in range (sizeY6):
+ f.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ f2.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ for name in range (sizeY6):
+ f.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ f2.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ for name in range (sizeY4):
+ f.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
+ f2.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
+ for name in range (sizeY8):
+ f.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
+ f2.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
+ f.write("\n")
+ f2.write("\n")
+ # Names of the Output variables with the bus names
+ f.write(unitstring)
+ f2.write(unitstring2)
+ for name in range (sizeY0):
+ f.write(str(plants_base[name][8]).replace('\n','')+";")
+ f2.write(str(plants_base[name][8]).replace('\n','')+";")
+ for name in range (sizeY0):
+ f.write(str(plants_base[name][8]).replace('\n','')+";")
+ f2.write(str(plants_base[name][8]).replace('\n','')+";")
+ for name in range (sizeY1):
+ f.write(str(buses_base[name][3]).replace("\n",'')+";")
+ f2.write(str(buses_base[name][3]).replace("\n",'')+";")
+ for name in range (sizeY2):
+ f.write(str(lines_base[name][8]).replace("\n",'').replace("-","_")+ " - " +str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY2):
+ f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY2):
+ f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY5):
+ f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY5):
+ f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY5):
+ f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY7):
+ f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY7):
+ f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY7):
+ f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY3):
+ f.write(str(loads_base[name][4]).replace("\n",'')+";")
+ f2.write(str(loads_base[name][4]).replace("\n",'')+";")
+ for name in range (sizeY6):
+ f.write(str(motors_base[name][4]).replace("\n",'')+";")
+ f2.write(str(motors_base[name][4]).replace("\n",'')+";")
+ for name in range (sizeY6):
+ f.write(str(motors_base[name][4]).replace("\n",'')+";")
+ f2.write(str(motors_base[name][4]).replace("\n",'')+";")
+ for name in range (sizeY4):
+ f.write(str(shunt_base[name][3]).replace("\n",'')+";")
+ f2.write(str(shunt_base[name][3]).replace("\n",'')+";")
+ for name in range (sizeY8):
+ f.write(str(swshunt_base[name][3]).replace("\n",'')+";")
+ f2.write(str(swshunt_base[name][3]).replace("\n",'')+";")
+ f.write("\n")
+ f2.write("\n")
+ f.close()
+ f2.close()
+
+ if not PFParams['UNIT_COMMITMENT']:
+ for filename in logCSVfilename_UC:
+ os.remove(filename)
+
+ # Definition of size input/output
+ inputDim = len(Laws.keys())+ int(N_1_fromFile)
+ outputDim = 12
+
+ N_1_LINES = []
+ if ('N_1_LINES' in PSENconfig.Dico):
+ if PSENconfig.Dico['N_1_LINES']['Activated'] == True:
+ for N1 in PSENconfig.Dico['N_1_LINES']['Probability']:
+ if N1[1] != 0:
+ N_1_LINES.append(N1[0])
+ N_1_TRANSFORMERS = []
+ if ('N_1_TRANSFORMERS' in PSENconfig.Dico):
+ if PSENconfig.Dico['N_1_TRANSFORMERS']['Activated'] == True:
+ for N1 in PSENconfig.Dico['N_1_TRANSFORMERS']['Probability']:
+ if N1[1] != 0:
+ N_1_TRANSFORMERS.append(N1[0])
+ N_1_MOTORS = []
+ if ('N_1_MOTORS' in PSENconfig.Dico):
+ if PSENconfig.Dico['N_1_MOTORS']['Activated'] == True:
+ for N1 in PSENconfig.Dico['N_1_MOTORS']['Probability']:
+ if N1[1] != 0:
+ N_1_MOTORS.append(N1[0])
+ N_1_LOADS = []
+ if ('N_1_LOADS' in PSENconfig.Dico):
+ if PSENconfig.Dico['N_1_LOADS']['Activated'] == True:
+ for N1 in PSENconfig.Dico['N_1_LOADS']['Probability']:
+ if N1[1] != 0:
+ N_1_LOADS.append(N1[0])
+ N_1_GENERATORS = []
+ if ('N_1_GENERATORS' in PSENconfig.Dico):
+ if PSENconfig.Dico['N_1_GENERATORS']['Activated'] == True:
+ for N1 in PSENconfig.Dico['N_1_GENERATORS']['Probability']:
+ if N1[1] != 0:
+ N_1_GENERATORS.append(N1[0])
+
+
+    #Create a dictionary of useful values for the PSSE functions
+ dico={'TStest':0,'Xt':[],'sizeY0':sizeY0,'sizeY1':sizeY1,'sizeY2':sizeY2,\
+ 'sizeY3':sizeY3,'sizeY4':sizeY4,'sizeY5':sizeY5,'sizeY6':sizeY6,'sizeY7':sizeY7,'sizeY8':sizeY8, 'sizeY':sizeY,\
+ 'folder':pathBigFolder,'folderN_1':folderN_1,\
+ 'day':day,'hour':hour, 'position':0,'PFParams': PFParams,\
+ 'lenpac':SimuParams['SIZE_PACKAGE'],\
+ 'num_pac':0,'logCSVfilename':logCSVfilename,'logCSVfilename_UC':logCSVfilename_UC,'Laws':Laws,'CorrMatrix': CorrMatrix,\
+ 'Generators':PSENconfig.MachineDico, 'Loads':PSENconfig.LoadDico, 'Motors':PSENconfig.MotorDico,\
+ 'Lines':PSENconfig.LineDico, 'Transformers':PSENconfig.TransfoDico,\
+ 'doc_base':'','continLines':continLines,'continTransfos':continTransfos,'timeVect':[],\
+ 'continGroups':continGroups,'continLoads':continLoads,'continMotors':continMotors,'continVal':continVal,'continProb':continProb,\
+ 'N_1_fromFile': N_1_fromFile,'all_inputs_init':all_inputs_init,'N_1_LINES':N_1_LINES, 'N_1_TRANSFORMERS':N_1_TRANSFORMERS,'N_1_MOTORS':N_1_MOTORS,'N_1_LOADS':N_1_LOADS,'N_1_GENERATORS':N_1_GENERATORS,'Paths':Paths}
+
+ if PFParams["ALGORITHM"]=="Optimum Power Flow":
+ dico['flag2']=int(PFParams['LS_Q_CONVERGENCE_CRITERIA'])
+ dico['UnitCommitment']= PFParams['UNIT_COMMITMENT']
+ else:
+ dico['flag2']=False
+ dico['UnitCommitment']=False
+#===============================================================================
+# EXECUTION
+#===============================================================================
+ print ("\n\n\n Starting PSEN ")
+
+ # inputSamp=[]
+ outputSampleAll=NumericalSample(0,12)#initialization
+ ymachine=NumericalSample(0,sizeY0)
+
+ try :
+ time_serie
+ print('Time series')
+ dico['TStest']=1
+ Xt=[]
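+        # Build one row per time point: each time-series law contributes its
+        # value and every other law gets the placeholder -1; n counts the
+        # placeholders so index j-n stays aligned with the time_serie_mat columns.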
+ for i in range (len(time_serie_mat)) : #as many as there are points in the time serie
+
+ Xt0=[]
+ n=0
+ for j in range (len(time_serie_flag)) : #for each variable
+
+ if time_serie_flag[j] == -1 : #if not a time series
+ Xt0.append(-1)
+ n+=1
+ else :
+ Xt0.append(time_serie_mat[i][j-n]) #append the element
+
+ Xt.append(Xt0)
+ dico['Xt']=Xt
+ dico['timeVect']=timeVect[0:len(Xt)]
+ dico['lenpac']=len(Xt)
+ nb_fix = 1
+
+
+ except NameError :
+ print ('Probabilistic')
+
+
+ dico['doc_base'] = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
+
+ liste_dico = []
+ liste_dico.append(dico.copy())
+ os.environ['PATH'] += ';' + dico['doc_base'] # add the path of each directory
+    Ind1, Ind2, output, inputSamp, Pmachine=Calculation(liste_dico[0].copy(),nb_fix,cmd_Path)# launch the OPF calculations
+
+
+# try :
+# time_serie
+# except NameError :
+# print ('Probabilistic')
+# dico['doc_base'] = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
+#
+# liste_dico = []
+# liste_dico.append(dico.copy())
+# os.environ['PATH'] += ';' + dico['doc_base'] # add the path of each directory
+# Ind1, Ind2, output, inputSamp, Pmachine=Calculation(liste_dico[0].copy(),nb_fix,cmd_Path)# lancer les calculs OPF
+#
+#
+# else:
+# print('Time series')
+# dico['TStest']=1
+# Xt=[]
+# for i in range (len(time_serie_mat)) : #as many as there are points in the time serie
+#
+# Xt0=[]
+# n=0
+# for j in range (len(time_serie_flag)) : #for each variable
+#
+# if time_serie_flag[j] == -1 : #if not a time series
+# Xt0.append(-1)
+# n+=1
+# else :
+# Xt0.append(time_serie_mat[i][j-n]) #append the element
+#
+# Xt.append(Xt0)
+#
+# liste_dico=[]
+# ipos=0
+#
+# RandomGenerator.SetSeed(os.getpid())
+# inputDistribution=create_dist(dico)
+# samples=[]
+#
+# dico['doc_base'] = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
+#
+# dico['Xt']=Xt
+# dico['timeVect']=timeVect[0:len(Xt)]
+## dico['Xt']=Xt[ipos:int(((i+1)*round(float(len(Xt))/float(num_cores))))]
+## dico['timeVect']=timeVect[ipos:int(((i+1)*round(float(len(Xt))/float(num_cores))))]
+## ipos=int(((i+1)*round(float(len(Xt))/float(num_cores))))
+#
+# myMCE = MonteCarloExperiment(inputDistribution,len(dico['Xt']))
+# Samp = myMCE.generate()
+# samples.append(Samp)
+#
+# liste_dico.append(dico.copy()) #append a new dico to the list
+# os.environ['PATH'] += ';' + dico['doc_base'] #add the path of each directory
+#
+# inputSamp, output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC = PFFunct(liste_dico[0].copy(),np.array(samples[0]))
+#
+## for l in range(num_cores):
+## print "launching PACKAGE "+str(l)
+## p= po.apply_async(PSSEFunct,args=(liste_dico[l].copy(),np.array(samples[l]),),\
+## callback=function_callback_psse) #callback function
+
+
+
+#===============================================================================
+# OUTPUT RETRIEVAL AND CSV WRITING
+#===============================================================================
+
+ print( "Finished multiprocessing")
+
+ for i in Pmachine:
+ ymachine.add(NumericalPoint(i))
+ ymachineMean=ymachine.computeMean()
+
+ for i in output:
+ outputSampleAll.add(NumericalPoint(i))
+ outputDim=outputSampleAll.getDimension()
+ outputSize=outputSampleAll.getSize()
+
+ inputSample=NumericalSample(0,inputDim)
+ for i in inputSamp:
+ inputSample.add(NumericalPoint(i))
+
+ outputSample=NumericalSample(0,outputDim)
+ outputSampleMissed=NumericalSample(0,outputDim)
+
+ for i in range (outputSize):
+ #if outputSampleAll[i,inputDim]==0 :
+ if outputSampleAll[i,3]==0 :
+ outputSampleMissed.add(outputSampleAll[i])
+ else :
+ outputSample.add(outputSampleAll[i])
+
+ outputDescription=[]
+ for i in range (outputDim):
+ outputDescription.append("Y"+str(i))
+ outputSample.setDescription( outputDescription )
+
+ # Get the empirical mean and standard deviations
+ empMeanX = inputSample.computeMean()
+ empSdX = inputSample.computeStandardDeviationPerComponent()
+
+ if int(outputSample.getSize())>0:
+ empiricalMean = outputSample.computeMean()
+ empiricalSd = outputSample.computeStandardDeviationPerComponent()
+ else:
+ print ("ALERT: Not a single scenario converged")
+ empiricalMean = ["-"]*outputDim
+ empiricalSd = ["-"]*outputDim
+
+ # Writing
+ CSVfilename=pathBigFolder+"\simulation_interestValues"+hour+".csv" # Name of the file : global variable
+ f = open(CSVfilename, "a")
+ f.write('CASES SIMULATED: '+str(outputSize)+'\n\n')
+
+ f.write(';;Mean;Standard deviation\n')
+
+ entete=entete.split(';')
+ unit=unit.split(';')
+
+ for name in range (inputDim+outputDim+sizeY0):
+
+ if (name<inputDim):
+ f.write(entete[name]+';'+unit[name]+';'+\
+ str(empMeanX[name])+';'+str(empSdX[name])+'\n')
+ if name==inputDim:
+ f.write('\n')
+## f.write('\n'+entete[name]+';'+unit[name]+';'\
+## +str(empiricalMean[name-inputDim])+';'+\
+## str(empiricalSd[name-inputDim])+'\n')
+ if (inputDim<name<inputDim+outputDim):
+ #pdb.set_trace()
+ f.write(entete[name]+';'+unit[name]+';'\
+ +str(empiricalMean[name-inputDim-1])+';'+\
+ str(empiricalSd[name-inputDim-1])+'\n')
+ if name==(inputDim+outputDim):
+ f.write("\nY:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
+ +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
+ str(ymachineMean[name-(inputDim+outputDim)])+"\n")
+ if (inputDim+outputDim<name):
+ f.write("Y:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
+ +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
+ str(ymachineMean[name-(inputDim+outputDim)])+"\n")
+
+ if (int(PFParams['LS_Q_CONVERGENCE_CRITERIA'])): #if criteria on Load shed and mvar
+ f.write('\n\nIndicator Load Shedding=;')
+
+ f.write('Indicator Fixed Shunt=;')
+
+ else:
+ f.write('\n\nIndicator NumVoltage=;')
+
+ f.write('Indicator NumTransit=;')
+
+ f.write('\n')
+ for i in range(len(Ind1)):
+ f.write(str(Ind1[i])+';')
+ f.write(str(Ind2[i])+'\n')
+
+ f.close()
+
+ CSVcomplete_filename=pathBigFolder+"\simulationDClog_complete_"+hour+".csv" # Name of the file : global variable
+ f=open(CSVcomplete_filename,"a")
+
+ # liste_dico2 = []
+ # for k,dico in enumerate(liste_dico):
+ # package_folder = dico['doc_base']
+ # if os.path.isfile(os.path.join(dico['doc_base'],'Case_1.sav')):
+ # liste_dico2.append(dico)
+ # else:
+ # shutil.rmtree(dico['doc_base'])
+
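+    # Merge the per-package CSV logs: only the first package keeps its two
+    # header rows; in the probabilistic case the iteration numbers of later
+    # packages are re-sequenced and the summary tables are concatenated at the end.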
+ if dico['TStest']==1: #if Time series, different output file format
+ for k,dico in enumerate(liste_dico):
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+
+ if k==0:
+ f.write(g.read())
+ else:
+ g_contents = g.read()
+ g_contents2 = g_contents.split('\n')
+ g_contents_noheaders = '\n'.join(g_contents2[2:])
+## g_contents_noheaders = ''
+## for m in range(2,len(g_contents2)):
+## g_contents_noheaders+=g_contents2[m] + '\n'
+ f.write(g_contents_noheaders)
+ g.close()
+
+ else: #if probabilistic, must treat table output
+ for k,dico in enumerate(liste_dico):
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+
+ if k==0:
+ g_contents=g.read()
+ g_headers = g_contents.partition('\n')[0] + "\n"
+ g_contents0 = g_contents.partition('\n')[2]
+ g_headers += g_contents0.partition('\n')[0] + "\n"
+ g_contents_noheaders = g_contents0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ it_num = len(g_iterations.split('\n'))
+ g_summarytable = g_contents_noheaders.partition('\n\n')[2]
+ f.write(g_headers)
+ f.write(g_iterations)
+ f.write('\n')
+ else:
+ g_contents = g.read()
+ g_contents_noheaders0 = g_contents.partition('\n')[2]
+ g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
+ for line in g_summarytable2.split('\n')[2:]:
+ if line != '':
+ g_summarytable += line
+ g_iterations_newnumbers = ""
+ for line in g_iterations.split("\n"): #increment iteration numbers
+ it_num += 1
+ cells=line.split(';')
+ cells[0]=str(it_num)
+ newline=";".join(cells)+'\n'
+ g_iterations_newnumbers+=newline
+ f.write(g_iterations_newnumbers)
+ g.close()
+
+ f.write('\n\n' + g_summarytable) #write summary table at end
+
+ f.close()
+
+ if PFParams['ALGORITHM']=='Optimum Power Flow':
+ if PFParams['UNIT_COMMITMENT']:
+ # Write the second csv
+ CSVcomplete_filename=pathBigFolder+"\simulationDClog_beforeUC_complete_"+hour+".csv" # Name of the file : global variable
+ f=open(CSVcomplete_filename,"a")
+ if dico['TStest']==1: #if Time series, different output file format
+ for k,dico in enumerate(liste_dico):
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+
+ if k==0:
+ f.write(g.read())
+ else:
+ g_contents = g.read()
+ g_contents2 = g_contents.split('\n')
+ g_contents_noheaders = '\n'.join(g_contents2[2:])
+ f.write(g_contents_noheaders)
+ g.close()
+
+ else: #if probabilistic, must treat table output
+ for k,dico in enumerate(liste_dico):
+ ExtraNL = False
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+
+ if k==0:
+ g_contents=g.read()
+ g_headers = g_contents.partition('\n')[0] + "\n"
+ g_contents0 = g_contents.partition('\n')[2]
+ g_headers += g_contents0.partition('\n')[0] + "\n"
+ g_contents_noheaders = g_contents0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ g_iterations_split = g_iterations.split('\n')
+ if g_iterations_split[-1]=="":
+ g_iterations_split = g_iterations_split[0:-1]
+ it_num = len(g_iterations_split)
+ g_summarytable = g_contents_noheaders.partition('\n\n')[2]
+ f.write(g_headers)
+ #f.write(g_iterations)
+ for line in g_iterations_split:
+ f.write(line)
+ f.write('\n')
+ #f.write('\n')
+ else:
+ g_contents = g.read()
+ g_contents_noheaders0 = g_contents.partition('\n')[2]
+ g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ g_iterations_split = g_iterations.split('\n')
+ if g_iterations_split[-1]=="":
+ g_iterations_split = g_iterations_split[0:-1]
+ g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
+ for line in g_summarytable2.split('\n')[2:]:
+ if line != '':
+ g_summarytable += line
+ g_iterations_newnumbers = ""
+ for line in g_iterations_split: #increment iteration numbers
+ it_num += 1
+ cells=line.split(';')
+ cells[0]=str(it_num)
+ newline=";".join(cells)+'\n'
+ g_iterations_newnumbers+=newline
+ f.write(g_iterations_newnumbers)
+ g.close()
+
+ f.write('\n\n' + g_summarytable) #write summary table at end
+
+ f.close()
+
+
+ #convert decimal separator to commas for csv files
+ if PFParams['DECIMAL_SEPARATOR']==",":
+ csvlist = []
+ for path, subdirs, files in os.walk(pathBigFolder):
+ for name in files:
+ if name.endswith(".csv"):
+ csvlist.append(os.path.join(path, name))
+ for csvfile in csvlist:
+ h = open(csvfile,"r")
+ crd = csv.reader(h,delimiter=";")
+ csvfiletemp = csvfile[0:-4] + "0" + ".csv"
+ g = open(csvfiletemp, "w", newline='\n')
+ cwt = csv.writer(g, delimiter=";")
+ for row in crd:
+ rowwcommas = []
+ for item in row:
+                    try:
+                        float(item) #numeric cell: swap the decimal point for a comma
+                        rowwcommas.append(str(item).replace(".",","))
+                    except ValueError:
+                        rowwcommas.append(item)
+ cwt.writerow(rowwcommas)
+ h.close()
+ g.close()
+ os.remove(csvfile)
+ shutil.copy2(csvfiletemp, csvfile)
+ os.remove(csvfiletemp)
+
+ f=open(exec_file,'a')
+    stop_time=time.perf_counter()
+ f.write("Stop time: %f; Duration: %f; Time per execution: %f; " \
+ % (round(stop_time), round(stop_time-start_time), round((stop_time-start_time)/outputSize)))
+ f.write("\n\n")
+ f.close()
+
+ print('\n\nSimulated '+str(outputSize)+' cases in '+ str(round(stop_time-start_time))+\
+ ' seconds. Average '+str(round((stop_time-start_time)/outputSize,2))+'s per case.')
+
+ nMissed=int(outputSampleMissed.getSize())
+
+ print ('\n\n Non-convergence rate is '+str(round(nMissed*100/outputSize,3))\
+ +' % ('+str(outputSampleMissed.getSize())+' cases out of '+str(outputSize)+')')
+
+ #graphical_out(inputSample, outputSampleAll, inputDim, outputDim, montecarlosize)
+stop_total = time.perf_counter()
+print('run total in ' + str(round(stop_total - start_total, 3)) + ' seconds')
--- /dev/null
+MachineDico = {'WIND30__Gr1': {'PMIN': 0.0, 'EXNAME': 'WIND30 30.000', 'NAME': 'WIND30', 'NUMBER': 18, 'QMAX': 0.0, 'Q': 0.0, 'P': 20.0, 'QMIN': 0.0, 'ID': '1 ', 'PMAX': 20.0}, 'NDIESELG1__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG1 11.000', 'NAME': 'NDIESELG1', 'NUMBER': 6, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'HYDRO30__Gr1': {'PMIN': 0.0, 'EXNAME': 'HYDRO30 30.000', 'NAME': 'HYDRO30', 'NUMBER': 16, 'QMAX': 24.0, 'Q': 0.0001832990237744525, 'P': 40.0, 'QMIN': 0.0, 'ID': '1 ', 'PMAX': 40.0}, 'SOLAR30__Gr1': {'PMIN': 0.0, 'EXNAME': 'SOLAR30 30.000', 'NAME': 'SOLAR30', 'NUMBER': 19, 'QMAX': 0.0, 'Q': 0.0, 'P': 15.000000953674316, 'QMIN': 0.0, 'ID': '1 ', 'PMAX': 15.000000953674316}, 'NDIESELG3__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG3 11.000', 'NAME': 'NDIESELG3', 'NUMBER': 8, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'NDIESELG2__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG2 11.000', 'NAME': 'NDIESELG2', 'NUMBER': 7, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'NDIESELG4__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG4 11.000', 'NAME': 'NDIESELG4', 'NUMBER': 9, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'ODIESELG2__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG2 11.000', 'NAME': 'ODIESELG2', 'NUMBER': 2, 'QMAX': 8.220000267028809, 'Q': 3.820113182067871, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}, 'ODIESELG4__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG4 11.000', 'NAME': 'ODIESELG4', 'NUMBER': 4, 'QMAX': 8.220000267028809, 'Q': 3.820113182067871, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}, 'ODIESELG3__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG3 11.000', 'NAME': 'ODIESELG3', 'NUMBER': 3, 'QMAX': 8.220000267028809, 'Q': 3.820113182067871, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}, 'ODIESELG1__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG1 11.000', 'NAME': 'ODIESELG1', 'NUMBER': 1, 'QMAX': 8.220000267028809, 'Q': 3.8200631141662598, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}}
+LoadDico = {'ODIESEL__Lo1': {'EXNAME': 'ODIESEL 30.000', 'NAME': 'ODIESEL', 'NUMBER': 5, 'Q': 14.5, 'P': 30.000001907348633, 'ID': '1 '}, 'CITYB30__Lo1': {'EXNAME': 'CITYB30 30.000', 'NAME': 'CITYB30', 'NUMBER': 12, 'Q': 24.5, 'P': 50.0, 'ID': '1 '}, 'CITYD30__Lo1': {'EXNAME': 'CITYD30 30.000', 'NAME': 'CITYD30', 'NUMBER': 15, 'Q': 7.25, 'P': 15.000000953674316, 'ID': '1 '}, 'CITYC30__Lo1': {'EXNAME': 'CITYC30 30.000', 'NAME': 'CITYC30', 'NUMBER': 14, 'Q': 9.75, 'P': 20.0, 'ID': '1 '}}
+LineDico = {'NDIESEL__HYDRO90__Li1': {'TONAME': 'HYDRO90', 'FROMNUMBER': 10, 'FROMEXNAME': 'NDIESEL 90.000', 'FROMNAME': 'NDIESEL', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '1 '}, 'CITYC90__SOLAR90__Li1': {'TONAME': 'SOLAR90', 'FROMNUMBER': 13, 'FROMEXNAME': 'CITYC90 90.000', 'FROMNAME': 'CITYC90', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '1 '}, 'NDIESEL__CITYB90__Li1': {'TONAME': 'CITYB90', 'FROMNUMBER': 10, 'FROMEXNAME': 'NDIESEL 90.000', 'FROMNAME': 'NDIESEL', 'TOEXNAME': 'CITYB90 90.000', 'TONUMBER': 11, 'ID': '1 '}, 'NDIESEL__CITYB90__Li2': {'TONAME': 'CITYB90', 'FROMNUMBER': 10, 'FROMEXNAME': 'NDIESEL 90.000', 'FROMNAME': 'NDIESEL', 'TOEXNAME': 'CITYB90 90.000', 'TONUMBER': 11, 'ID': '2 '}, 'CITYC90__HYDRO90__Li1': {'TONAME': 'HYDRO90', 'FROMNUMBER': 13, 'FROMEXNAME': 'CITYC90 90.000', 'FROMNAME': 'CITYC90', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '1 '}, 'ODIESEL__JUNCTION30__Li1': {'TONAME': 'JUNCTION30', 'FROMNUMBER': 5, 'FROMEXNAME': 'ODIESEL 30.000', 'FROMNAME': 'ODIESEL', 'TOEXNAME': 'JUNCTION30 30.000', 'TONUMBER': 21, 'ID': '1 '}, 'CITYB90__CITYC90__Li1': {'TONAME': 'CITYC90', 'FROMNUMBER': 11, 'FROMEXNAME': 'CITYB90 90.000', 'FROMNAME': 'CITYB90', 'TOEXNAME': 'CITYC90 90.000', 'TONUMBER': 13, 'ID': '1 '}, 'WIND30__JUNCTION30__Li1': {'TONAME': 'JUNCTION30', 'FROMNUMBER': 18, 'FROMEXNAME': 'WIND30 30.000', 'FROMNAME': 'WIND30', 'TOEXNAME': 'JUNCTION30 30.000', 'TONUMBER': 21, 'ID': '1 '}, 'CITYD30__JUNCTION30__Li1': {'TONAME': 'JUNCTION30', 'FROMNUMBER': 15, 'FROMEXNAME': 'CITYD30 30.000', 'FROMNAME': 'CITYD30', 'TOEXNAME': 'JUNCTION30 30.000', 'TONUMBER': 21, 'ID': '1 '}, 'HYDRO90__SOLAR90__Li1': {'TONAME': 'SOLAR90', 'FROMNUMBER': 17, 'FROMEXNAME': 'HYDRO90 90.000', 'FROMNAME': 'HYDRO90', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '1 '}, 'CITYD30__SOLAR30__Li1': {'TONAME': 'SOLAR30', 'FROMNUMBER': 15, 'FROMEXNAME': 'CITYD30 30.000', 'FROMNAME': 'CITYD30', 'TOEXNAME': 'SOLAR30 30.000', 'TONUMBER': 19, 'ID': '1 '}, 'HYDRO30__WIND30__Li2': {'TONAME': 'WIND30', 'FROMNUMBER': 16, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'WIND30 30.000', 'TONUMBER': 18, 'ID': '2 '}, 'HYDRO30__WIND30__Li1': {'TONAME': 'WIND30', 'FROMNUMBER': 16, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'WIND30 30.000', 'TONUMBER': 18, 'ID': '1 '}}
+TransfoDico = {'ODIESELG2__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 2, '#WIND': 2, 'FROMEXNAME': 'ODIESELG2 11.000', 'FROMNAME': 'ODIESELG2', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}, 'NDIESELG3__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 8, '#WIND': 2, 'FROMEXNAME': 'NDIESELG3 11.000', 'FROMNAME': 'NDIESELG3', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'ODIESEL__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 5, '#WIND': 2, 'FROMEXNAME': 'ODIESEL 30.000', 'FROMNAME': 'ODIESEL', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'SOLAR30__SOLAR90__Tr1': {'TONAME': 'SOLAR90', 'FROMNUMBER': 19, '#WIND': 2, 'FROMEXNAME': 'SOLAR30 30.000', 'FROMNAME': 'SOLAR30', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '1 '}, 'NDIESELG2__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 7, '#WIND': 2, 'FROMEXNAME': 'NDIESELG2 11.000', 'FROMNAME': 'NDIESELG2', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'HYDRO30__HYDRO90__Tr1': {'TONAME': 'HYDRO90', 'FROMNUMBER': 16, '#WIND': 2, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '1 '}, 'CITYC90__CITYC30__Tr1': {'TONAME': 'CITYC30', 'FROMNUMBER': 13, '#WIND': 2, 'FROMEXNAME': 'CITYC90 90.000', 'FROMNAME': 'CITYC90', 'TOEXNAME': 'CITYC30 30.000', 'TONUMBER': 14, 'ID': '1 '}, 'NDIESELG1__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 6, '#WIND': 2, 'FROMEXNAME': 'NDIESELG1 11.000', 'FROMNAME': 'NDIESELG1', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'HYDRO30__HYDRO90__Tr2': {'TONAME': 'HYDRO90', 'FROMNUMBER': 16, '#WIND': 2, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '2 '}, 'CITYB90__CITYB30__Tr1': {'TONAME': 'CITYB30', 'FROMNUMBER': 11, '#WIND': 2, 'FROMEXNAME': 'CITYB90 90.000', 'FROMNAME': 'CITYB90', 'TOEXNAME': 'CITYB30 30.000', 'TONUMBER': 12, 'ID': '1 '}, 'CITYB90__CITYB30__Tr2': {'TONAME': 'CITYB30', 'FROMNUMBER': 11, '#WIND': 2, 'FROMEXNAME': 'CITYB90 90.000', 'FROMNAME': 'CITYB90', 'TOEXNAME': 'CITYB30 30.000', 'TONUMBER': 12, 'ID': '2 '}, 'HYDRO30__HYDRO90__Tr3': {'TONAME': 'HYDRO90', 'FROMNUMBER': 16, '#WIND': 2, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '3 '}, 'SOLAR30__SOLAR90__Tr2': {'TONAME': 'SOLAR90', 'FROMNUMBER': 19, '#WIND': 2, 'FROMEXNAME': 'SOLAR30 30.000', 'FROMNAME': 'SOLAR30', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '2 '}, 'ODIESELG3__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 3, '#WIND': 2, 'FROMEXNAME': 'ODIESELG3 11.000', 'FROMNAME': 'ODIESELG3', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}, 'NDIESELG4__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 9, '#WIND': 2, 'FROMEXNAME': 'NDIESELG4 11.000', 'FROMNAME': 'NDIESELG4', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'ODIESELG4__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 4, '#WIND': 2, 'FROMEXNAME': 'ODIESELG4 11.000', 'FROMNAME': 'ODIESELG4', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}, 'ODIESELG1__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 1, '#WIND': 2, 'FROMEXNAME': 'ODIESELG1 11.000', 'FROMNAME': 'ODIESELG1', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}}
+MotorDico = {}
+
+Dico ={'DIRECTORY': {'PSSPY_path': 'C:\\Program Files (x86)\\PTI\\PSSE34\\PSSPY27', 'PSSE_path': 'C:\\Program Files (x86)\\PTI\\PSSE34\\PSSBIN', 'sav_file': 'X:/Small Grid PSSE/TestIsland_2015_OPF - Areas.sav', 'results_folder': 'X:/Small Grid PSSE/Results'}, 'PSSE_PARAMETERS': {'UNIT_COMMITMENT': True, 'I_MAX': 'RateA', 'DECIMAL_SEPARATOR': '.', 'FUEL_COST': True, 'ALGORITHM': 'Optimum Power Flow', 'MVAR_COST': False, 'ITERATION_LIMIT': 20, 'SAVE_CASE_BEFORE_UNIT_COMMITMENT': False, 'LOCK_TAPS': True, 'LOADSHEDDING_COST': False}, 'CORRELATION': {'CorrelationMatrix': ["['load']", '[1.0]']}, 'DISTRIBUTIONload': {'Load': ['CITYB30__Lo1', 'CITYC30__Lo1', 'CITYD30__Lo1', 'ODIESEL__Lo1'], 'A': 0.8, 'B': 0.9, 'Activated': True, 'Sampling': 'Same sample for all loads', 'ComponentType': 'Load', 'Law': 'Uniform', 'Type': 'Load Level'}, 'SIMULATION': {'NUMBER_PACKAGE': 1, 'SIZE_PACKAGE': 10}}
\ No newline at end of file
--- /dev/null
+# -*- coding: cp1252 -*-
+#===============================================================================
+# PSEN SCRIPT FOR PROBABILISTIC STUDIES OF ELECTRICAL NETWORKS
+#===============================================================================
+from openturns import *
+from pylab import *
+from math import *
+import os, random, sys
+import numpy as np
+import time #import gmtime, strftime, sleep
+from array import *
+
+from support_functions import *
+import pdb
+import multiprocessing
+import copy
+import PSENconfig #file with Eficas output dictionaries
+import shutil
+import csv
+
+InitializeDispatchGentoP0 = False
+Debug = True #for testing
+## =============================================================================================
+def function_callback(result): #define callback for a probabilistic study
+ output.extend(result[0])
+ inputSamp.extend(result[1])
+ Pmachine.extend(result[2])
+## =============================================================================================
+def callback_indices(indices): #define callback function for probabilistic study
+ Ind1.extend(indices[0])
+ Ind2.extend(indices[1])
+## =============================================================================================
+def function_callback_psse(result): #define callback function for time study
+ #print(result)
+ output.extend(result[1])
+ inputSamp.extend(result[0])#5])
+ Pmachine.extend(result[2])#6])
+
+def log(filename, text):
+ f=open(filename, 'a')
+ f.write(text)
+ f.close()
+
+## =============================================================================================
+def init_PSSEWrapper():
+ sys.path.append(PSENconfig.Dico['DIRECTORY']['PSSE_path'])
+ os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PSSE_path'] + ";"+ os.environ['PATH']
+
+ if Debug:
+        cmd_Path=os.getcwd()+'\usrCmd.py' #launched from pssewrapper.py
+        #cmd_Path=os.getcwd()+'\PSEN\usrCmd.py' #launched from qteficas_psen.py
+ else:
+ cmd_Path=os.path.join(os.path.dirname(os.path.abspath(__file__)),"usrCmd.py")
+        ##cmd_Path=os.getcwd()+'\EficasV1\PSEN_Eficas\PSEN\usrCmd.py' #launch with the .bat
+ return cmd_Path
+## =============================================================================================
+def init_PSSE(Paths):
+    ## Initialize PSSE
+ import psspy
+ import pssarrays
+ import redirect
+ _i=psspy.getdefaultint()
+ _f=psspy.getdefaultreal()
+ _s=psspy.getdefaultchar()
+ redirect.psse2py()
+ psspy.psseinit(80000)
+
+ # Silent execution of PSSe
+ islct=6 # 6=no output; 1=standard
+ psspy.progress_output(islct)
+
+ #read sav
+ psspy.case(Paths['sav_file'])
+ all_inputs_init=read_sav(Paths['sav_file'])
+
+## reminder: structure of item[], which holds the network elements
+# plants = all_inputs_init[3]
+# for item in plants:
+# bus = item[0]
+# status = item[1]
+# _id = item[2]
+# pgen = item[3]
+# qgen = item[4]
+# mvabase = item [5]
+# pmax = item[6]
+# qmax = item[7]
+# name = item[8]
+
+ if Debug:
+        print("all_inputs_init[][] = generators ", " init_PSSE")
+ for item in all_inputs_init[3]:
+ print(item[8])
+ return all_inputs_init
+
+## =============================================================================================
+def read_PSENconfig():
+    """
+    Read the PSENconfig file.
+    PSENconfig contains all the information about the network elements and the user configuration.
+    """
+ Paths = PSENconfig.Dico['DIRECTORY']
+ SimuParams = PSENconfig.Dico['SIMULATION']
+ PSSEParams = PSENconfig.Dico['PSSE_PARAMETERS']
+
+ # Probabilistic Study: central dispersion => Monte Carlo or LHS iterations
+    if 'NUMBER_PACKAGE' in SimuParams:
+        nb_fix = int(SimuParams['NUMBER_PACKAGE'])
+    elif 'CONVERGENCE' in SimuParams:
+        if SimuParams['CONVERGENCE']==1:
+            nb_fix=0
+        else:
+            nb_fix=100
+            print('\nALERT:\nConvergence not selected, and no number of packages chosen: default number= 100')
+ time.sleep(2)
+
+    #LOADING OF PSSE
+#    pssFolder=str(Paths['PSSE_path']) ### does not appear to be used
+ os.environ['PATH'] += ';' + Paths['results_folder']
+ os.chdir(Paths['results_folder'])
+
+ if Debug:
+ print(Paths, SimuParams, PSSEParams, nb_fix, " Paths, SimuParams, PSSEParams, nb_fix", " read_PSENconfig()")
+
+ return Paths, SimuParams, PSSEParams, nb_fix
+
+## =============================================================================================
+#### TEST TO DO: create two different PSENconfig files, one with a correlation matrix and one without, to check that "Laws" is built correctly
+def read_laws():
+    """
+    If the law is PDF_from_file or TimeSeries_from_file, read the data from the
+    associated csv and update Laws[shortkey]['FileContents'].
+
+    To be reworked so that all laws are handled the same way.
+    """
+    ## if the correlation matrix exists, read the matrix header and build the list of law names
+    if 'CORRELATION' in PSENconfig.Dico:
+        LawNames = RemoveListfromString(PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][0]) ## RemoveListfromString is defined in support_functions
+    Laws = {} ## holds all the distributions (copied from PSENconfig)
+    NonActiveIndices = [] ## indices (into LawNames) of the deactivated laws
+    TSindices = [] ## indices (into LawNames) of the time-series laws
+ for key in PSENconfig.Dico.keys():
+ if key[0:12] == 'DISTRIBUTION':
+ shortkey = key[12:]
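+            # e.g. the key 'DISTRIBUTIONload' yields the shortkey 'load'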
+ if PSENconfig.Dico[key]['Activated']==True: #only take into account laws which are "activated"
+ Laws[shortkey]= PSENconfig.Dico[key]
+ if Laws[shortkey]['Law']=='PDF_from_file': #read contents of .csv file
+ g=open(Laws[shortkey]['FileName'],"r")
+ lines=g.readlines()
+ g.close()
+ Laws[shortkey]['FileContents']=lines
+ elif Laws[shortkey]['Law']=='TimeSeries_from_file': #read contents of .csv file
+ g=open(Laws[shortkey]['FileName'],"r")
+ lines=g.readlines()
+ g.close()
+ Laws[shortkey]['FileContents']=lines
+                    if 'CORRELATION' in PSENconfig.Dico:
+                        TSindices.append(LawNames.index(shortkey))
+                if Laws[shortkey]['ComponentType'] in Laws[shortkey]:
+ if isinstance(Laws[shortkey][Laws[shortkey]['ComponentType']],str):
+ Laws[shortkey][Laws[shortkey]['ComponentType']]=[Laws[shortkey][Laws[shortkey]['ComponentType']]] #if only one entry, create list
+ if Laws[shortkey]['ComponentType']=='Reserve Constraint':
+ Laws[shortkey]['Type']='Reserve Constraint'
+                if 'TF_Input' in Laws[shortkey]: #If the user supplied a transfer function
+ Laws[shortkey]['TransferFunction']=True
+ else:
+ Laws[shortkey]['TransferFunction']=False
+ else:
+                if 'CORRELATION' in PSENconfig.Dico:
+ NonActiveIndices.append(LawNames.index(shortkey))
+ if Debug:
+ print(Laws, TSindices, NonActiveIndices, LawNames)
+
+ return Laws, TSindices, NonActiveIndices, LawNames
+## =============================================================================================
+
+def read_or_create_corrmatrix(LawNames, NonActiveIndices, TSindices):
+    if 'CORRELATION' in PSENconfig.Dico:
+ #Treat Correlation Matrix - eliminate non-activated laws
+ CorrMatrix0 = {}
+ LawNames2 = []
+
+ for i, lawname in enumerate(LawNames):
+ if i not in NonActiveIndices:
+ LawNames2.append(lawname)
+ Cmax = PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][1:]
+ CMax = []
+ for i,c in enumerate(Cmax):
+ if i not in NonActiveIndices:
+ c = RemoveListfromString(c)
+ c = map(float,c)
+ c2 = []
+ for ind, c_el in enumerate(c):
+ if ind not in NonActiveIndices:
+ #if time series, don't correlate other laws with the value "1".
+ if (ind not in TSindices) and (i not in TSindices):
+ c2.append(c_el)
+ elif i==ind:
+ c2.append(1.)
+ else:
+ c2.append(0.)
+ CMax.append(c2)
+ CorrMatrix0['matrix'] = np.array(CMax)
+ CorrMatrix0['laws'] = LawNames2
+
+ else: #acceptable only if all active distributions are time series or if only 1 active distribution
+ if len(Laws)==1: #create correlation matrix of 1 x 1
+ CorrMatrix0 = {}
+ CorrMatrix0['matrix'] = np.array([[1]])
+ CorrMatrix0['laws'] = Laws.keys()
+ else: #>1 law, test if all TS
+ allTS=True
+ for key in Laws.keys():
+ if Laws[key]['Law']!='TimeSeries_from_file':
+ allTS=False
+ if allTS:
+ CorrMatrix0 = {}
+ CorrMatrix0['matrix']=np.eye(len(Laws))
+ CorrMatrix0['laws']=Laws.keys()
+ else:
+ print 'Error: Correlation matrix must be defined. Enter 0s for correlations between laws and time series.'
+ sys.exit(1)
+
+ if Debug:
+ print(CorrMatrix0, " read_or_create_corrmatrix(LawNames, NonActiveIndices, TSindices)", " CorrMatrix0")
+
+ return CorrMatrix0
+
+## =============================================================================================
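+## Worked example of the filtering above (illustrative values): with
+## LawNames = ['A','B','C'], 'B' deactivated (NonActiveIndices=[1]) and 'C' a
+## time series (TSindices=[2]), the row and column of 'B' are dropped and every
+## off-diagonal term involving 'C' is forced to 0. (the diagonal stays 1.), so a
+## full matrix [[1,.3,.5],[.3,1,.2],[.5,.2,1]] reduces to
+## CorrMatrix0['matrix'] = [[1., 0.], [0., 1.]] with CorrMatrix0['laws'] = ['A', 'C']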
+def contingency():
+ """
+ utilise la fonction config_contingency() definie dans support_functions.py
+ """
+ # Treat Contingency Files enteres as CSVs
+ LinesList = []
+ GeneratorsList = []
+ LoadsList = []
+ TransformersList = []
+ MotorsList = []
+
+ if PSENconfig.Dico.has_key('N_1_LINES'):
+ if PSENconfig.Dico['N_1_LINES']['Activated']==True:
+ LinesList = PSENconfig.Dico['N_1_LINES']['Probability']
+ if PSENconfig.Dico.has_key('N_1_GENERATORS'):
+ if PSENconfig.Dico['N_1_GENERATORS']['Activated']==True:
+ GeneratorsList = PSENconfig.Dico['N_1_GENERATORS']['Probability']
+ if PSENconfig.Dico.has_key('N_1_LOADS'):
+ if PSENconfig.Dico['N_1_LOADS']['Activated']==True:
+ LoadsList = PSENconfig.Dico['N_1_LOADS']['Probability']
+ if PSENconfig.Dico.has_key('N_1_TRANSFORMERS'):
+ if PSENconfig.Dico['N_1_TRANSFORMERS']['Activated']==True:
+ TransformersList = PSENconfig.Dico['N_1_TRANSFORMERS']['Probability']
+ if PSENconfig.Dico.has_key('N_1_MOTORS'):
+ if PSENconfig.Dico['N_1_MOTORS']['Activated']==True:
+ MotorsList = PSENconfig.Dico['N_1_MOTORS']['Probability']
+
+ try :
+ continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb = config_contingency(LinesList,GeneratorsList,TransformersList,LoadsList,MotorsList)
+ except IOError : # if the contingency file is not in the expected format, handle the exception
+ print 'Error with contingency input file'
+ sys.exit(1)
+
+ if len(continVal)>0:
+ N_1_fromFile = True
+ else:
+ N_1_fromFile = False
+
+ # Create the N-1 folder-name variable
+ if N_1_fromFile == True :
+ folderN_1 = '1_'
+ else :
+ folderN_1 = '_'
+
+ if Debug:
+ print(continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1, " continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1", " fonction : contingency()")
+ return continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1
+## ===============================================================================================
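+## Note: folderN_1 is only a name fragment; the results directory created later
+## becomes "N1_<date>_<hour>" when N-1 contingencies come from file and
+## "N_<date>_<hour>" otherwise.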
+
+def TS(CorrMatrix):
+ # Define the time-series variables
+ # TODO: move to pandas?
+
+ time_serie_flag=[]
+ time_serie_mat=[]
+ time_serie_time=[]
+ timeVect = []
+ for i,key in enumerate(CorrMatrix['laws']) :
+ if Laws[key]['Law']=='TimeSeries_from_file':
+ linesTS = Laws[key]['FileContents']
+ time_serie = 1 #raise the flag time_serie
+ tsm=[]
+ tVect=[]
+ for j in range (len(linesTS)) :
+ try:
+ tsm.append(float(commaToPoint(linesTS[j].split(';')[1])))
+ tVect.append(linesTS[j].split(';')[0])
+ except :
+ pass
+ time_serie_time.append(tVect)
+ time_serie_flag.append(1)
+ time_serie_mat.append(tsm)
+ else:
+ time_serie_flag.append(-1)
+ if N_1_fromFile==True:
+ time_serie_flag.append(-1)
+
+ #find shortest time series column
+ try:
+ time_serie
+ timeVect = time_serie_time[0]
+ for index, tV in enumerate(time_serie_time):
+ if len(tV) < len(timeVect):
+ timeVect = tV
+ except NameError:
+ pass
+
+ #change time Vector into iteration numbers (otherwise difficult for post processing)
+ N = len(timeVect)
+ timeVect = range(1, N+1)
+
+ time_serie_mat=zip(*time_serie_mat)
+
+ if Debug:
+ print(time_serie_flag, time_serie_mat, time_serie_time, timeVect, " time_serie_flag, time_serie_mat, time_serie_time, timeVect", " fonction TS()")
+
+ return time_serie_flag, time_serie_mat, time_serie_time, timeVect
+## ===============================================================================================
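+## Parsing sketch for one time-series .csv line, assuming commaToPoint() (from
+## support_functionsPF) turns a decimal comma into a point:
+## "01/01/2020 00:00;0,85".split(';') -> value float(commaToPoint('0,85')) = 0.85
+## and timestamp '01/01/2020 00:00'; zip(*time_serie_mat) then transposes the
+## data so each row of time_serie_mat holds one time step across all time-series laws.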
+
+
+
+""" DEBUT DU MAIN """
+
+if __name__ == '__main__':
+
+ cmd_Path = init_PSSEWrapper()
+ Paths, SimuParams, PSSEParams, nb_fix = read_PSENconfig()
+ all_inputs_init = init_PSSE(Paths)
+ log("report.txt", "Starting time: %f; Monte Carlo Size : %f; " % (time.clock(), SimuParams["SIZE_PACKAGE"]))
+
+ Laws, TSindices, NonActiveIndices, LawNames = read_laws()
+ CorrMatrix = read_or_create_corrmatrix(LawNames, NonActiveIndices, TSindices)
+
+ continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1 = contingency()
+
+ time_serie_flag, time_serie_mat, time_serie_time, timeVect = TS(CorrMatrix)
+
+
+ exit() # temporary stop for the refactoring: the functions above are wired up; the legacy code below is still being split into functions
+
+
+
+ ## configure the OPF in PSS/E
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow': #run OPF so that adjustable bus shunts are included
+ psspy.produce_opf_log_file(1,r"""DETAIL""")
+ TapChange = 1-int(PSSEParams['LOCK_TAPS']) #0 if locked, 1 if stepping
+ psspy.opf_fix_tap_ratios(1-TapChange) #0 : do not fix transformer tap ratios
+ psspy.report_output(6,"",[0,0]) #6 = no output
+ psspy.minimize_fuel_cost(int(PSSEParams['FUEL_COST']))
+ psspy.minimize_adj_bus_shunts(int(PSSEParams['MVAR_COST']))
+ psspy.minimize_load_adjustments(int(PSSEParams['LOADSHEDDING_COST']))
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+
+ #access OPF data
+ allbus=1
+ include = [1,1,1,1] #isolated buses, out of service branches, subsystem data, subsystem tie lines
+ out = 0 #out to file, not window
+ # if psspy.bsysisdef(0):
+ # sid = 0
+ # else: # Select subsytem with all buses
+ # sid = -1
+ sid = 3
+ RopFile = Paths['sav_file'][0:-4]+'.rop'
+ AlreadyRop = os.path.isfile(RopFile)
+ if not AlreadyRop:
+ ierr = psspy.rwop(sid,allbus,include,out,RopFile) #write rop file
+ GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(RopFile)
+ if PSSEParams['UNIT_COMMITMENT']:
+ if PSSEParams.has_key('SpinningReserveID'):
+ PSSEParams['SpinningReserveID_1']= PSSEParams['SpinningReserveID']
+ del PSSEParams['SpinningReserveID']
+ for num in range(1,16):
+ keyname = 'SpinningReserveID_' + str(int(num))
+ if PSSEParams.has_key(keyname):
+ ReserveID = PSSEParams[keyname]
+ ReserveFound = False
+ ReserveActive=False
+ for PRD in PeriodReserveData:
+ if PRD[0] == ReserveID:
+ ReserveFound=True
+ ReserveActive=PRD[3]
+ if not ReserveFound:
+ print 'ALERT: ReserveID ', str(ReserveID), ' is not found. User must define the period reserve in the .sav file before including a distribution on the reserve constraint in PSEN.'
+ if not ReserveActive:
+ print 'ALERT: Spinning Reserve Correction entered in PSEN, but ReserveID ', str(ReserveID), ' is not activated in PSS/E.'
+ else:
+ pass
+ psspy.nopf(0,1) # run the OPF
+ postOPFinitialization(Paths['sav_file'],all_inputs_init,AdjLoadTables,init_gen=True,init_bus=True,init_fxshnt=True,init_swshnt=True,init_load=True,init_P0=InitializeDispatchGentoP0)
+ #print "OPF run"
+
+ all_inputs_after_OPF = read_sav(Paths['sav_file'])
+
+## TODO: should these be copied rather than referenced?
+ buses_base=all_inputs_after_OPF[0]
+ lines_base=all_inputs_after_OPF[1]
+ trans_base=all_inputs_after_OPF[2]
+ plants_base=all_inputs_after_OPF[3]
+ loads_base=all_inputs_after_OPF[4]
+ shunt_base=all_inputs_after_OPF[5]
+ motors_base=all_inputs_after_OPF[6]
+ trans3_base=all_inputs_after_OPF[7]
+ swshunt_base=all_inputs_after_OPF[8]
+
+ ## TODO: move to pandas
+ # Initialize size output
+ sizeY0=len(plants_base) #np.matrix(plants_base).shape[0]
+ sizeY1=len(buses_base) #np.matrix(buses_base).shape[0]
+ sizeY2=len(lines_base) #np.matrix(lines_base).shape[0]
+ sizeY3=len(loads_base) #np.matrix(loads_base).shape[0]
+ sizeY4=len(shunt_base) #np.matrix(shunt_base).shape[0]
+ sizeY5=len(trans_base) #np.matrix(trans_base).shape[0]
+ sizeY6=len(motors_base) #np.matrix(motors_base).shape[0]
+ sizeY7=len(trans3_base)
+ sizeY8=len(swshunt_base) #np.matrix(shunt_base).shape[0]
+ sizeY=[sizeY0,sizeY1,sizeY2,sizeY5,sizeY7,sizeY3,sizeY6,sizeY4,sizeY8]
+ sizeOutput=sizeY2
+
+ #####################################################################################
+ ## TODO: move to the parameter-reading section
+ if SimuParams.has_key('MAX_CORES'):
+ max_cores = SimuParams['MAX_CORES']
+ else:
+ max_cores = multiprocessing.cpu_count()
+
+ try:
+ time_serie
+ except NameError: #probabilistic
+ if max_cores==1:
+ print('Must use at least 2 cores for probabilistic simulation. MAX_CORES parameter set to 2.')
+ max_cores=2
+ num_cores=min(min(multiprocessing.cpu_count(),max_cores)-1, nb_fix) #Num cores
+
+ print('Number of cores used: ' + str(num_cores + 1))
+ ## to be tested (a priori the else branch is never taken, so multiprocessing cannot be avoided)
+ ## the else has been moved under the if: this should now work
+ else:
+ num_cores=min(multiprocessing.cpu_count(),max_cores)
+ NoMultiProcTS=False
+ if num_cores==1:
+ NoMultiProcTS = True
+ if Debug==True:
+ NoMultiProcTS = True
+ print('Number of cores used: ' + str(num_cores))
+
+
+ #Extension name for the folders and files
+ day=time.strftime("%Y%m%d", time.gmtime())
+ hour=time.strftime("%Hh%Mm%S", time.gmtime())
+
+ # Initialize the big folder
+ pathBigFolder = Paths['results_folder']+"/N"+folderN_1+day+"_"+hour
+ if not os.path.exists(pathBigFolder): os.makedirs(pathBigFolder)
+
+
+ #folder=Paths['results_folder']+"/N"+folderN_1+day #big folder
+ for j in range(num_cores):
+ # Initialize a folder per core
+ pathSmallFolder = pathBigFolder+'\package'+str(j)+"_N"+folderN_1+day+"_"+hour
+ if not os.path.exists(pathSmallFolder): os.makedirs(pathSmallFolder)
+ #####################################################################################
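+ ## Layout sketch (illustrative date/hour): one working folder per core, e.g.
+ ## <results_folder>/N1_20200101_10h00m00/package0_N1_20200101_10h00m00/
+ ## each package folder later receives its own BaseCase.sav and CSV logs.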
+
+
+ ## write the output files
+ ## TODO: move to pandas
+
+ ## write the headers
+ # Initialize the logger : write the headers
+ entete = ""
+ unit = ""
+ for key in CorrMatrix['laws']:
+ if Laws[key]['ComponentType']=='Generator':
+ if Laws[key]['Type']=='Generator Availability':
+ entete+="X:genStatus" + key + ";"
+ unit += ";"
+ else:
+ entete+="X:Gen" + key + "(%Pnom);"
+ unit += "%Pnom;"
+ elif Laws[key]['ComponentType']=='Load':
+ if Laws[key]['Type']=='Load Availability':
+ entete+="X:loadStatus" + key + ";"
+ unit += ";"
+ else:
+ entete+="X:Load" + key + "(p.u.);"
+ unit += "p.u.;"
+ elif Laws[key]['ComponentType']=='Line':
+ entete+="X:lineStatus" + key + ";"
+ unit += ";"
+ elif Laws[key]['ComponentType']=='Transformer':
+ entete+="X:transfoStatus" + key + ";"
+ unit += ";"
+ elif Laws[key]['ComponentType']=='Motor':
+ entete+="X:motorStatus" + key + ";"
+ unit += ";"
+
+ elif Laws[key]['ComponentType']=='Reserve Constraint':
+ entete+="X:Reserve" + key + ";"
+ unit += "MW;"
+
+ if N_1_fromFile==True:
+ entete += "X:N-1;"
+ unit += "component disconnected;"
+ entete2=entete + ";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;Y:GensDisconnected;;"
+ if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
+ entete+=";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;Y:PlimitSwing;Y:QlimitSwing;;"
+ else:
+ entete+=";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;;"
+
+
+ unit2= unit + ';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;[(bus, id),...];;'
+ if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
+ unit+=';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;T/F;T/F;;'
+ else:
+ unit+=';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;;'
+ string = "Iteration;;" + entete
+ unitstring = "Num;;" + unit
+ string2 = "Iteration;;" + entete2
+ unitstring2 = "Num;;" + unit2
+
+ logCSVfilename=[]
+ logCSVfilename_UC=[]
+ ## note: an output file is written in each sub-folder
+ for i in range(num_cores):
+ logCSVfilename.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_"+hour+".csv") # Name of the file : global variable
+ logCSVfilename_UC.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_beforeUC_"+hour+".csv") # Name of the file : global variable
+ f = open(logCSVfilename[i], "a")
+ f2 = open(logCSVfilename_UC[i], "a")
+
+ f.write(string)
+ f2.write(string2)
+
+ # Names of the Output variables with the bus number
+ for name in range (sizeY0):
+ f.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
+ f2.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
+ for name in range (sizeY0):
+ f.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
+ f2.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
+ for name in range (sizeY1):
+ f.write("Y:VBus"+str(buses_base[name][0])+";")
+ f2.write("Y:VBus"+str(buses_base[name][0])+";")
+ for name in range (sizeY2):
+ f.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ f2.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ for name in range (sizeY2):
+ f.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ f2.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ for name in range (sizeY2):
+ f.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ f2.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
+ for name in range (sizeY5):
+ f.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ f2.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ for name in range (sizeY5):
+ f.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ f2.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ for name in range (sizeY5):
+ f.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+ f2.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
+
+ for name in range (sizeY7):
+ f.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ f2.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ for name in range (sizeY7):
+ f.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ f2.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ for name in range (sizeY7):
+ f.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ f2.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
+ for name in range (sizeY3):
+ f.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
+ f2.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
+ for name in range (sizeY6):
+ f.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ f2.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ for name in range (sizeY6):
+ f.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ f2.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
+ for name in range (sizeY4):
+ f.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
+ f2.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
+ for name in range (sizeY8):
+ f.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
+ f2.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
+ f.write("\n")
+ f2.write("\n")
+ # Names of the Output variables with the bus names
+ f.write(unitstring)
+ f2.write(unitstring2)
+ for name in range (sizeY0):
+ f.write(str(plants_base[name][8]).replace('\n','')+";")
+ f2.write(str(plants_base[name][8]).replace('\n','')+";")
+ for name in range (sizeY0):
+ f.write(str(plants_base[name][8]).replace('\n','')+";")
+ f2.write(str(plants_base[name][8]).replace('\n','')+";")
+ for name in range (sizeY1):
+ f.write(str(buses_base[name][3]).replace("\n",'')+";")
+ f2.write(str(buses_base[name][3]).replace("\n",'')+";")
+ for name in range (sizeY2):
+ f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY2):
+ f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY2):
+ f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY5):
+ f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY5):
+ f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY5):
+ f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY7):
+ f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY7):
+ f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY7):
+ f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
+ for name in range (sizeY3):
+ f.write(str(loads_base[name][4]).replace("\n",'')+";")
+ f2.write(str(loads_base[name][4]).replace("\n",'')+";")
+ for name in range (sizeY6):
+ f.write(str(motors_base[name][4]).replace("\n",'')+";")
+ f2.write(str(motors_base[name][4]).replace("\n",'')+";")
+ for name in range (sizeY6):
+ f.write(str(motors_base[name][4]).replace("\n",'')+";")
+ f2.write(str(motors_base[name][4]).replace("\n",'')+";")
+ for name in range (sizeY4):
+ f.write(str(shunt_base[name][3]).replace("\n",'')+";")
+ f2.write(str(shunt_base[name][3]).replace("\n",'')+";")
+ for name in range (sizeY8):
+ f.write(str(swshunt_base[name][3]).replace("\n",'')+";")
+ f2.write(str(swshunt_base[name][3]).replace("\n",'')+";")
+ f.write("\n")
+ f2.write("\n")
+ f.close()
+ f2.close()
+
+ ## TODO: run this test before writing the two files
+ if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
+ PSSEParams['MVAR_COST'] = False
+ for filename in logCSVfilename_UC:
+ os.remove(filename)
+ else:
+ if not PSSEParams['UNIT_COMMITMENT']:
+ for filename in logCSVfilename_UC:
+ os.remove(filename)
+
+
+ # Definition of size input/output
+ inputDim = len(Laws.keys())+ int(N_1_fromFile)
+ outputDim = 12 + 2*int(PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow')
+
+
+ #Create a dictionary of useful values passed to the PSS/E worker functions
+ dico={'TStest':0,'Xt':[],'sizeY0':sizeY0,'sizeY1':sizeY1,'sizeY2':sizeY2,\
+ 'sizeY3':sizeY3,'sizeY4':sizeY4,'sizeY5':sizeY5,'sizeY6':sizeY6,'sizeY7':sizeY7,'sizeY8':sizeY8, 'sizeY':sizeY,\
+ 'folder':pathBigFolder,'folderN_1':folderN_1,\
+ 'day':day,'position':0,'PSSEParams': PSSEParams,\
+ '_i':_i,'_f':_f,'_s':_s,'lenpac':SimuParams['SIZE_PACKAGE'],\
+ 'num_pac':0,'logCSVfilename':logCSVfilename,'logCSVfilename_UC':logCSVfilename_UC,'Laws':Laws,'CorrMatrix': CorrMatrix,\
+ 'Generators':PSENconfig.MachineDico,'Loads':PSENconfig.LoadDico, 'Motors':PSENconfig.MotorDico,\
+ 'Lines':PSENconfig.LineDico,'Transformers':PSENconfig.TransfoDico,\
+ 'doc_base':'','continLines':continLines,'continTransfos':continTransfos,'timeVect':[],\
+ 'continGroups':continGroups,'continLoads':continLoads,'continMotors':continMotors,'continVal':continVal,'continProb':continProb,\
+ 'N_1_fromFile': N_1_fromFile,'all_inputs_init':all_inputs_after_OPF, 'AdjLoadTables':AdjLoadTables, 'Paths':Paths}
+
+ if PSSEParams["ALGORITHM"]=="Optimum Power Flow":
+ dico['flag2']=int(PSSEParams['MVAR_COST'])
+ dico['UnitCommitment']= PSSEParams['UNIT_COMMITMENT']
+ else:
+ dico['flag2']=False
+ dico['UnitCommitment']=False
+
+#===============================================================================
+# EXECUTION
+#===============================================================================
+
+
+
+ print "\n\n\n Starting PSEN "
+
+ inputSamp=[]
+
+ outputSampleAll=NumericalSample(0,12 + 2*int(PSSEParams["ALGORITHM"]=="Economic Dispatch and Power Flow"))#initialization
+ ymachine=NumericalSample(0,sizeY0)
+ output=[]
+
+ inputSamp=[]
+ LStable=[]
+ FStable=[]
+ Pmachine=[]
+
+ Ind1=[]
+ Ind2=[]
+
+ def function_callback(result): #define callback for a probabilistic study
+ output.extend(result[0])
+ inputSamp.extend(result[1])
+ Pmachine.extend(result[2])
+
+ def callback_indices(indices): #define callback function for probabilistic study
+ Ind1.extend(indices[0])
+ Ind2.extend(indices[1])
+
+ def function_callback_psse(result): #define callback function for time study
+ #print(result)
+ output.extend(result[1])
+ inputSamp.extend(result[0])#5])
+ Pmachine.extend(result[2])#6])
+
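+ ## Minimal sketch of the multiprocessing pattern used below (dico_i is a
+ ## stand-in for one per-package dictionary): worker packages run Calculation()
+ ## and their return values are forwarded to the callbacks above, which extend
+ ## the shared result lists in the parent process:
+ ## po = multiprocessing.Pool(maxtasksperchild=1)
+ ## po.apply_async(Calculation, args=(dico_i, data, msg), callback=function_callback)
+ ## po.apply_async(Convergence, args=(data, msg, int(PSSEParams['MVAR_COST']), nb_fix, cmd_Path), callback=callback_indices)
+ ## po.close(); po.join() # callbacks fire in the parent as each task returns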
+
+ try :
+ time_serie
+
+ except NameError :
+ print 'Probabilistic'
+
+ #create new dico for each process which is going to be launched
+ liste_dico=[]
+ for i in range(num_cores):
+ dico['num_pac']=i
+ psspy.case(Paths['sav_file'])
+ dico['doc_base']=os.path.join(pathBigFolder,'package'+str(i)+"_N"+folderN_1+day+"_"+hour) #working directory of each package
+ psspy.save(os.path.join(dico['doc_base'],"BaseCase.sav" )) #create a initial case for each package
+ RopFile = Paths['sav_file'][0:-4]+'.rop'
+ RopFile2 = os.path.join(dico['doc_base'],"BaseCase.rop" )
+ shutil.copy(RopFile,RopFile2)
+
+ liste_dico.append(dico.copy()) #append a new dico to the list
+ os.environ['PATH'] += ';' + dico['doc_base'] #add the path of each directory
+
+ dico['TStest']=0
+ cur_dir=os.getcwd() #get the current directory path
+ tmp=sys.stdout #get the stdout path
+
+ #pdb.set_trace()##################?
+
+ po=multiprocessing.Pool(maxtasksperchild=1)
+ m1=multiprocessing.Manager()
+ m2=multiprocessing.Manager()
+ data=m1.Queue()
+ msg=m2.Queue()
+ msg.put('ok')
+
+
+ if nb_fix==0 or num_cores < nb_fix :
+ print "Convergence criteria or fewer cores than packages to run"
+
+ if Debug:
+ #res=Convergence(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path)
+ res=Calculation(liste_dico[0].copy(),data,msg)
+
+ else:
+ #either for stop criteria or for a big number of package
+ for l in range(num_cores+1):
+ if l!=num_cores:
+ p= po.apply_async(Calculation,args=(liste_dico[l].copy(),data,msg,),\
+ callback=function_callback)
+ else:
+ p= po.apply_async(Convergence,args=(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path,),\
+ callback=callback_indices)
+
+ po.close()
+ po.join()
+
+ elif num_cores>=nb_fix and nb_fix!=0:
+ print "Fixed number of packages, fewer packages than cores"
+
+ if Debug:
+ #res=Convergence(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path)
+ res=Calculation(liste_dico[0].copy(),data,msg)
+ else:
+ #for a small number of packages
+ for l in range(nb_fix+1):
+
+ if l!=nb_fix:
+ p= po.apply_async(Calculation,args=(liste_dico[l].copy(),data,msg,),\
+ callback=function_callback)
+ else:
+ p= po.apply_async(Convergence,args=(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path,),\
+ callback=callback_indices)
+ po.close()
+ po.join()
+
+
+ os.chdir(cur_dir) #back to the working directory
+ sys.stdout=tmp #back to the shell stdout
+
+
+ else:
+ print 'Time series'
+
+ dico['TStest']=1
+ Xt=[]
+ for i in range (len(time_serie_mat)) : #as many as there are points in the time series
+
+ Xt0=[]
+ n=0
+ for j in range (len(time_serie_flag)) : #for each variable
+
+ if time_serie_flag[j] == -1 : #if not a time series
+ Xt0.append(-1)
+ n+=1
+ else :
+ Xt0.append(time_serie_mat[i][j-n]) #append the element
+
+ Xt.append(Xt0)
+
+ liste_dico=[]
+ ipos=0
+
+ RandomGenerator.SetSeed(os.getpid())
+ inputDistribution=create_dist(dico)
+ samples=[]
+
+ #create new dico for each process which is going to be launched
+ for i in range(num_cores):
+ dico['num_pac']=i
+ psspy.case(Paths['sav_file'])
+ dico['doc_base']=os.path.join(pathBigFolder,'package'+str(i)+"_N"+folderN_1+day+'_'+hour) #working directory of each package
+
+ if i==num_cores-1:
+ dico['Xt']=Xt[ipos:len(Xt)]
+ dico['timeVect']=timeVect[ipos:len(Xt)]
+ else:
+ dico['Xt']=Xt[ipos:int(((i+1)*np.ceil(float(len(Xt))/float(num_cores))))]
+ dico['timeVect']=timeVect[ipos:int(((i+1)*np.ceil(float(len(Xt))/float(num_cores))))]
+ ipos=int(((i+1)*np.ceil(float(len(Xt))/float(num_cores)))) #ceil, to match the slice bounds above (round could duplicate or skip a point)
+
+ myMCE = MonteCarloExperiment(inputDistribution,len(dico['Xt']))
+ Samp = myMCE.generate()
+ samples.append(Samp)
+
+ psspy.save(dico['doc_base']+"/BaseCase.sav" ) #create a initial case for each package
+ liste_dico.append(dico.copy()) #append a new dico to the list
+ os.environ['PATH'] += ';' + dico['doc_base'] #add the path of each directory
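+ ## Split sketch: with len(Xt)=10 and num_cores=3, np.ceil(10/3.)=4.0 gives
+ ## package 0 -> Xt[0:4], package 1 -> Xt[4:8], and the last core takes the
+ ## remainder Xt[8:10], so every time step lands in exactly one package.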
+
+ cur_dir=os.getcwd() #get the current directory path
+ tmp=sys.stdout #get the stdout path
+
+
+ if NoMultiProcTS:
+ inputSamp, output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC = PSSEFunct(liste_dico[0].copy(),np.array(samples[0]))
+
+ else:
+ po=multiprocessing.Pool(maxtasksperchild=1) #create a multiprocessing.Pool object
+ for l in range(num_cores):
+ print "launching PACKAGE "+str(l)
+ p= po.apply_async(PSSEFunct,args=(liste_dico[l].copy(),np.array(samples[l]),),\
+ callback=function_callback_psse) #callback function
+
+ po.close()
+ po.join()
+
+# po=multiprocessing.Pool(maxtasksperchild=1) #create a multiprocessing.Pool object
+# results = [ po.apply(PSSEFunct,args=(liste_dico[l].copy(),np.array(samples[l]),)) for l in range(num_cores) ]
+#
+# for result in results:
+# output.extend(result[1])
+# inputSamp.extend(result[0])#5])
+## Pmachine.extend(result[2])#6])
+#
+# po.close()
+# po.join()
+
+ os.chdir(cur_dir) #back to the working directory
+ sys.stdout=tmp #back to the shell stdout
+
+
+#===============================================================================
+# OUTPUT RETRIEVAL AND CSV WRITING
+#===============================================================================
+
+ print "Finished multiprocessing"
+
+ for i in Pmachine:
+ ymachine.add(NumericalPoint(i))
+ ymachineMean=ymachine.computeMean()
+
+ for i in output:
+ outputSampleAll.add(NumericalPoint(i))
+ outputDim=outputSampleAll.getDimension()
+ outputSize=outputSampleAll.getSize()
+
+ inputSample=NumericalSample(0,inputDim)
+ for i in inputSamp:
+ inputSample.add(NumericalPoint(i))
+
+ outputSample=NumericalSample(0,outputDim)
+ outputSampleMissed=NumericalSample(0,outputDim)
+
+ for i in range (outputSize):
+ #if outputSampleAll[i,inputDim]==0 :
+ if outputSampleAll[i,3]==0 :
+ outputSampleMissed.add(outputSampleAll[i])
+ else :
+ outputSample.add(outputSampleAll[i])
+
+ outputDescription=[]
+ for i in range (outputDim):
+ outputDescription.append("Y"+str(i))
+ outputSample.setDescription( outputDescription )
+
+ # Get the empirical mean and standard deviations
+ empMeanX = inputSample.computeMean()
+ empSdX = inputSample.computeStandardDeviationPerComponent()
+
+ if int(outputSample.getSize())>0:
+ empiricalMean = outputSample.computeMean()
+ empiricalSd = outputSample.computeStandardDeviationPerComponent()
+ else:
+ print "ALERT: Not a single scenario converged"
+ empiricalMean = ["-"]*outputDim
+ empiricalSd = ["-"]*outputDim
+
+
+
+ # Writing
+ CSVfilename=pathBigFolder+"\simulation_interestValues"+hour+".csv" # Name of the file : global variable
+ f = open(CSVfilename, "a")
+ f.write('CASES SIMULATED: '+str(outputSize)+'\n\n')
+
+ f.write(';;Mean;Standard deviation\n')
+
+ entete=entete.split(';')
+ unit=unit.split(';')
+
+ for name in range (inputDim+outputDim+sizeY0):
+
+ if (name<inputDim):
+ f.write(entete[name]+';'+unit[name]+';'+\
+ str(empMeanX[name])+';'+str(empSdX[name])+'\n')
+ if name==inputDim:
+ f.write('\n')
+## f.write('\n'+entete[name]+';'+unit[name]+';'\
+## +str(empiricalMean[name-inputDim])+';'+\
+## str(empiricalSd[name-inputDim])+'\n')
+ if (inputDim<name<inputDim+outputDim):
+ #pdb.set_trace()
+ f.write(entete[name]+';'+unit[name]+';'\
+ +str(empiricalMean[name-inputDim-1])+';'+\
+ str(empiricalSd[name-inputDim-1])+'\n')
+ if name==(inputDim+outputDim):
+ f.write("\nY:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
+ +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
+ str(ymachineMean[name-(inputDim+outputDim)])+"\n")
+ if (inputDim+outputDim<name):
+ f.write("Y:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
+ +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
+ str(ymachineMean[name-(inputDim+outputDim)])+"\n")
+
+ if (int(PSSEParams['MVAR_COST'])): #if criteria on Load shed and mvar
+ f.write('\n\nIndicator Load Shedding=;')
+
+ f.write('Indicator Fixed Shunt=;')
+
+ else:
+ f.write('\n\nIndicator NumVoltage=;')
+
+ f.write('Indicator NumTransit=;')
+
+ f.write('\n')
+ for i in range(len(Ind1)):
+ f.write(str(Ind1[i])+';')
+ f.write(str(Ind2[i])+'\n')
+
+ f.close()
+
+ CSVcomplete_filename=pathBigFolder+"\simulationDClog_complete_"+hour+".csv" # Name of the file : global variable
+ f=open(CSVcomplete_filename,"a")
+
+ liste_dico2 = []
+ for k,dico in enumerate(liste_dico):
+ package_folder = dico['doc_base']
+ if os.path.isfile(os.path.join(dico['doc_base'],'Case_1.sav')):
+ liste_dico2.append(dico)
+ else:
+ shutil.rmtree(dico['doc_base'])
+
+
+
+ if dico['TStest']==1: #if Time series, different output file format
+ for k,dico in enumerate(liste_dico2):
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+ if k==0:
+ f.write(g.read())
+ else:
+ g_contents = g.read()
+ g_contents2 = g_contents.split('\n')
+ g_contents_noheaders = '\n'.join(g_contents2[2:])
+## g_contents_noheaders = ''
+## for m in range(2,len(g_contents2)):
+## g_contents_noheaders+=g_contents2[m] + '\n'
+ f.write(g_contents_noheaders)
+ g.close()
+
+ else: #if probabilistic, must treat table output
+ for k,dico in enumerate(liste_dico2):
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+ if k==0:
+ g_contents=g.read()
+ g_headers = g_contents.partition('\n')[0] + "\n"
+ g_contents0 = g_contents.partition('\n')[2]
+ g_headers += g_contents0.partition('\n')[0] + "\n"
+ g_contents_noheaders = g_contents0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ it_num = len(g_iterations.split('\n'))
+ g_summarytable = g_contents_noheaders.partition('\n\n')[2]
+ f.write(g_headers)
+ f.write(g_iterations)
+ f.write('\n')
+ else:
+ g_contents = g.read()
+ g_contents_noheaders0 = g_contents.partition('\n')[2]
+ g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
+ for line in g_summarytable2.split('\n')[2:]:
+ if line != '':
+ g_summarytable += line
+ g_iterations_newnumbers = ""
+ for line in g_iterations.split("\n"): #increment iteration numbers
+ it_num += 1
+ cells=line.split(';')
+ cells[0]=str(it_num)
+ newline=";".join(cells)+'\n'
+ g_iterations_newnumbers+=newline
+ f.write(g_iterations_newnumbers)
+ g.close()
+
+ f.write('\n\n' + g_summarytable) #write summary table at end
+
+ f.close()
+
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if PSSEParams['UNIT_COMMITMENT']:
+ # Write the second csv
+ CSVcomplete_filename=pathBigFolder+"\simulationDClog_beforeUC_complete_"+hour+".csv" # Name of the file : global variable
+ f=open(CSVcomplete_filename,"a")
+
+ if dico['TStest']==1: #if Time series, different output file format
+ for k,dico in enumerate(liste_dico2):
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+ if k==0:
+ f.write(g.read())
+ else:
+ g_contents = g.read()
+ g_contents2 = g_contents.split('\n')
+ g_contents_noheaders = '\n'.join(g_contents2[2:])
+ f.write(g_contents_noheaders)
+ g.close()
+
+ else: #if probabilistic, must treat table output
+ for k,dico in enumerate(liste_dico2):
+ ExtraNL = False
+ package_folder = dico['doc_base']
+ package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
+ g = open(package_resultsfile,"r")
+ if k==0:
+ g_contents=g.read()
+ g_headers = g_contents.partition('\n')[0] + "\n"
+ g_contents0 = g_contents.partition('\n')[2]
+ g_headers += g_contents0.partition('\n')[0] + "\n"
+ g_contents_noheaders = g_contents0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ g_iterations_split = g_iterations.split('\n')
+ if g_iterations_split[-1]=="":
+ g_iterations_split = g_iterations_split[0:-1]
+ it_num = len(g_iterations_split)
+ g_summarytable = g_contents_noheaders.partition('\n\n')[2]
+ f.write(g_headers)
+ #f.write(g_iterations)
+ for line in g_iterations_split:
+ f.write(line)
+ f.write('\n')
+ #f.write('\n')
+ else:
+ g_contents = g.read()
+ g_contents_noheaders0 = g_contents.partition('\n')[2]
+ g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
+ g_iterations = g_contents_noheaders.partition('\n\n')[0]
+ g_iterations_split = g_iterations.split('\n')
+ if g_iterations_split[-1]=="":
+ g_iterations_split = g_iterations_split[0:-1]
+ g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
+ for line in g_summarytable2.split('\n')[2:]:
+ if line != '':
+ g_summarytable += line
+ g_iterations_newnumbers = ""
+ for line in g_iterations_split: #increment iteration numbers
+ it_num += 1
+ cells=line.split(';')
+ cells[0]=str(it_num)
+ newline=";".join(cells)+'\n'
+ g_iterations_newnumbers+=newline
+ f.write(g_iterations_newnumbers)
+ g.close()
+
+ f.write('\n\n' + g_summarytable) #write summary table at end
+
+ f.close()
+
+ #convert decimal separator to commas for csv files
+ if PSSEParams['DECIMAL_SEPARATOR']==",":
+ csvlist = []
+ for path, subdirs, files in os.walk(pathBigFolder):
+ for name in files:
+ if name.endswith(".csv"):
+ csvlist.append(os.path.join(path, name))
+ for csvfile in csvlist:
+ h = open(csvfile,"rb")
+ crd = csv.reader(h,delimiter=";")
+ csvfiletemp = csvfile[0:-4] + "0" + ".csv"
+ g = open(csvfiletemp, "wb")#, newline='\n')
+ cwt = csv.writer(g, delimiter=";")
+ for row in crd:
+ rowwcommas = []
+ for item in row:
+ try:
+ isnum = float(item)+1
+ rowwcommas.append(str(item).replace(".",","))
+ except:
+ rowwcommas.append(item)
+ cwt.writerow(rowwcommas)
+ h.close()
+ g.close()
+ os.remove(csvfile)
+ shutil.copy2(csvfiletemp, csvfile)
+ os.remove(csvfiletemp)
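+ # Example: a numeric cell "1.5" passes the float() test above and is
+ # rewritten as "1,5"; non-numeric cells fail the test and are copied unchanged.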
+
+
+ f=open(exec_file,'a') ## TODO: exec_file and start_time are not defined in the refactored flow above; reuse the report file opened by log()
+ stop_time=time.clock()
+ f.write("Stop time: %f; Duration: %f; Time per execution: %f; " \
+ % (round(stop_time), round(stop_time-start_time), round((stop_time-start_time)/outputSize)))
+ f.write("\n\n")
+ f.close()
+
+ print '\n\nSimulated '+str(outputSize)+' cases in '+ str(round(stop_time-start_time))+\
+ ' seconds. Average '+str(round((stop_time-start_time)/outputSize))+'s per case.'
+
+ nMissed=int(outputSampleMissed.getSize())
+
+ print '\n\n Non-convergence rate is '+str(round(nMissed*100/outputSize,3))\
+ +' % ('+str(outputSampleMissed.getSize())+' cases out of '+str(outputSize)+')'
+
+ #graphical_out(inputSample, outputSampleAll, inputDim, outputDim, montecarlosize)
--- /dev/null
+
+############################################################
+# goal of this module: run an OPF for each study case
+############################################################
+
+import os,sys,pickle,time
+# from support_functionsPF import *#Valentin
+from support_functionsPF import read_pfd,read_pfd_simple,np, config_contingency
+# import PSENconfig # Valentin
+# sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])#Valentin
+# os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'#Valentin
+import powerfactory
+import PSENconfig
+import shutil
+import pdb
+import csv
+tempdir = r'C:\Logiciels DER\PSEN_PF_V4\Example\Results'
+
+app = powerfactory.GetApplication()
+
+# app.ActivateProject('39 genstatpvmoteur(4)')#Valentin
+prj = app.GetActiveProject()
+case = app.GetActiveStudyCase()#prj.GetContents('Case_0.IntCase',1)[0]
+# case = prj.GetContents('Case_46.IntCase',1)[0]#Valentin
+# case.Activate()#Valentin
+#app.Show()#Valentin
+
+#[busnumber, outserv, idplant, 0, 0, 0, 0, 0, busname, 0, 0,plant, pgini, pgini_a]
+def saveOPFresults(plants):
+ #save OPF results: P, Q of generators, transformer taps, switched shunt settings, load shedding
+ upload = app.GetFromStudyCase('ComDbupd') # select the database-update command
+ upload.iopt_lod = 0 # update load scaling factor: NO
+ upload.iopt_trf = 1 # update transformer taps: YES
+ upload.iopt_distTrf = 1 # update distribution transformer taps: YES
+ upload.iopt_shnt = 1 # update shunt/filter capacitive steps: YES
+ upload.iopt_lodpq = 0 # update P,Q of loads: YES or NO (depending on whether load shedding should be kept in the initialization)
+ upload.iopt_asmpq = 1 # update P,Q of asynchronous machines: YES
+ #upload.iopt_sympqv = 1 # update P,Q,V of synchronous + static machines: YES
+ upload.iopt_sympqv = 0 # update P,Q,V of synchronous + static machines: NO
+ upload.iopt_upd = 0 # do not update the activated reactive power
+ upload.iopt_tap = 1 # update all transformer taps
+ upload.Execute() # run the database update
+
+ #save P,Q of dispatchable machines (we don't want to save non-dispatchable machines, whose output is driven by triggers/laws)
+ for plant in plants:
+ #if str(plant[11]).endswith('.ElmSym'):
+ try:
+ if plant[11].ictpg == 1:
+ plant[11].pgini = plant[3]
+ plant[11].qgini = plant[4]
+# else: #non-dispatchable machine
+# triggers = plant[11].GetChildren(1, 'pgini.Charef', 1)
+# if len(triggers) == 0:
+# plant[11].qgini = plant[4]
+ except:
+ pass
+
+ return
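+# Usage sketch: saveOPFresults(plants) is called after a converged OPF with the
+# "plants" rows produced by read_pfd (layout in the comment above the function);
+# plant[11] is the PowerFactory machine object and plant[3]/plant[4] the OPF P/Q.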
+
+#def saveOPFresultsLS():
+# #save OPF results: P, Q of generators, transformer taps, switched shunt settings, load shedding
+# upload = app.GetFromStudyCase('ComDbupd') # select the database-update command
+# upload.iopt_lod = 1 # update load scaling factor: YES
+# upload.iopt_trf = 1 # update transformer taps: YES
+# upload.iopt_distTrf = 1 # update distribution transformer taps: YES
+# upload.iopt_shnt = 1 # update shunt/filter capacitive steps: YES
+# upload.iopt_lodpq = 1 # update P,Q of loads: YES or NO (depending on whether load shedding should be kept in the initialization)
+# upload.iopt_asmpq = 1 # update P,Q of asynchronous machines: YES
+# upload.iopt_sympqv = 1 # update P,Q,V of synchronous + static machines: YES
+# upload.iopt_upd = 0 # do not update the activated reactive power
+# upload.iopt_tap = 1 # update all transformer taps
+# upload.Execute() # run the database update
+# return
+
+
+nn=int(''.join(ele for ele in case.loc_name if ele.isdigit()))# case number
+cas = int(nn)
+scenario_temporaire = app.GetActiveScenario()
+if scenario_temporaire:
+ scenario_temporaire.Deactivate()
+ scenario_temporaire.Delete()
+app.SaveAsScenario('temp0_'+str(nn), 1) # create a scenario to save the base case
+scenario_temporaire0 = app.GetActiveScenario()
+scenario_temporaire0.Save()
+scenario_temporaire0.Deactivate()
+
+start = time.clock();
+with open('data_dico', 'rb') as fichier:
+ mon_depickler = pickle.Unpickler(fichier)
+ dico = mon_depickler.load()
+LS_allowed=dico['PFParams']['LOAD_SHEDDING_ALLOWED']
+TStest=dico['TStest']
+position=dico['position']
+PFParams=dico['PFParams']
+sizeY0=dico['sizeY0']
+sizeY1=dico['sizeY1']
+sizeY2=dico['sizeY2']
+sizeY3=dico['sizeY3']
+sizeY4=dico['sizeY4']
+sizeY5=dico['sizeY5']
+sizeY6=dico['sizeY6']
+sizeY7=dico['sizeY7']
+sizeY8=dico['sizeY8']
+sizeY=dico['sizeY']
+gen_UC_list = []
+
+Irate_num = 1
+num_pac = dico['num_pac']
+all_inputs_base = read_pfd_simple(app, prj.loc_name)
+plants_base = all_inputs_base[0]
+loads_base = all_inputs_base[1]
+shunt_base = all_inputs_base[2]
+swshunt_base = all_inputs_base[3]
+
+
+# Total initial (fixed) shunt on buses
+init_shunt = 0
+for i in range(len(shunt_base)):
+ init_shunt += float(shunt_base[i][2])
+
+
+
+if dico['UnitCommitment']:
+
+ app.SaveAsScenario('Case_' + str(nn) + '_beforeUC', 1) # create a scenario to save the pre-UC case
+ scenario_beforeUC = app.GetActiveScenario()
+
+ opf = app.GetFromStudyCase('ComOpf')
+
+ erropf = opf.Execute()# run the OPF
+ # specific handling to solve cases that are difficult to converge
+ if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ ldf = app.GetFromStudyCase('ComLdf')
+ ldf.iopt_initOPF = 1 # use the load flow to initialize the OPF
+ ldf.Execute()
+ opf.iInit = 1
+ erropf = opf.Execute() # run OPF with the 'cst' objective
+ print(' Run LDF for OPF ')
+ if erropf == 0: print(' OK thanks to initial LDF ')
+ else:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa = 0
+ while erropf == 1: # if 'cst' does not converge
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'los' # objective function = minimize total network losses
+ erropf = opf.Execute() # run OPF 'los'
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF loss ! ! ! ')
+ opf.iInit = 0 # flat-start OPF 'los'
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF loss ')
+ if erropf == 0: # if 'los' converged
+ if (aa == 2)and(LS_allowed):
+ opf.iopt_obj = 'shd'
+ opf.Execute()
+ if aa == 3:
+ # print(' take the OPF LOSS result')
+ # erropf = 1
+ # scenario_temporaire0.Apply(0) # restore the initial scenario
+
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shdUC','w')
+ #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shdUC','w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF with 'cst'
+ if erropf == 0:
+ if (aa == 2)and(LS_allowed):
+ print(' ==================== switched los->shd')
+ else:
+ print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
+ if (erropf==1)and(LS_allowed):
+ aa = 0
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ ldf.Execute() # initial values for OPF 'shd'
+ # opf.iInit = 1
+ while erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'shd' # objective function = minimize load shedding
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF shd ! ! ! ')
+ opf.iInit = 0
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF SHD ')
+ if erropf == 0: # if 'shd' converged
+ if aa == 2:
+ opf.iopt_obj = 'los'
+ opf.Execute()
+ if aa == 3:
+ print(' +++++++++++++++++++++++++take the OPF SHD result')
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn)+'_SHD' + '.shdUC','w')
+ #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shdUC','w')
+ filew.write('Case_' + str(nn) )
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF with 'cst'
+ if erropf == 0:
+ if aa == 2:
+ print('=== ========== switched shd->los')
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shdUC', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+ else:
+ print(' OK thanks to OPF SHD -------------------------------load shedding in case aa=' + str(aa))
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shdUC','w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+
+
+ loadShed = [[], [], [], [], []]
+ fxshnt = [[], [], []]
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+ if erropf == 0:
+ ok = True
+ else:
+ ok = False
+
+ if ok == True:
+
+ all_inputs = read_pfd(app, prj.loc_name, recal=0)
+
+ # start = stop; # ++++++++++++++++
+ buses = []
+ [buses.append(bus[0:8]) for bus in all_inputs[0]]
+ lines = []
+ [lines.append(bus[0:11]) for bus in all_inputs[1]]
+ transf = []
+ [transf.append(bus[0:11]) for bus in all_inputs[2]]
+ plants = []
+ [plants.append(bus[0:12]) for bus in all_inputs[3]]
+ loads = []
+ [loads.append(bus[0:7]) for bus in all_inputs[4]]
+ shunt = []
+ [shunt.append(bus[0:7]) for bus in all_inputs[5]]
+ motors = []
+ [motors.append(bus[0:6]) for bus in all_inputs[6]]
+ transf3 = []
+ [transf3.append(bus[0:14]) for bus in all_inputs[7]]
+ swshunt = []
+ [swshunt.append(bus[0:6]) for bus in all_inputs[8]]
+
+ # Extraction of the load shedding quantities
+ for ii in range(len(loads)):
+ LSscale = loads[ii][6].GetAttribute('s:scale')
+ P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
+ LS = (1-LSscale) * P_setpoint
+ if abs(LS)>0.1:
+ indexLS.append(ii)
+ flagLS = 1 # raise flag loadshedding
+ loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
+ loadShed[1].append(loads[ii][0]) #busnumber
+ loadShed[2].append(loads[ii][4]) #busname
+ loadShed[3].append(LS)
+ loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
+
+
+# if abs(loads[ii][1] - loads_base[ii][1]) > 0.1: # check the active power (0.1 tolerance to filter out numerical error)
+# indexLS.append(ii)
+# flagLS = 1 # raise flag loadshedding
+# loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
+# # loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
+# loadShed[1].append(loads[ii][0])
+# loadShed[2].append(loads[ii][4])
+# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
+# loadShed[4].append(loads[ii][1])
+
+
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads
+ loadShed = list(zip(*loadShed)) # transpose the matrix
+
+ for ii in range(len(shunt)):
+ if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the power change (0.1 tolerance to filter out numerical error)
+ indexFS.append(ii)
+ flagFS = 1 # raise flag loadshedding
+ fxshnt[0].append(nn) # Position seems to correspond to the number of the case we are treating
+ # fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])
+ fxshnt[1].append(shunt[ii][0])
+ fxshnt[2].append(shunt[ii][2])
+ indicFS = sum(fxshnt[2]) # sum all effective fixed-shunt values
+ fxshnt = list(zip(*fxshnt)) # transpose the matrix
+
+ #save OPF results in study case before disconnecting gens
+ saveOPFresults(plants)
+# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
+## for ind in indexLS: # only act on loads that have been shed
+## load = loads_base[ind]
+## #if load[11].iShedding == 1: # if loadshedding allowed on the bus
+# for ind,load in enumerate(loads_base):
+# try: #disactivate triggers, save results
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 1
+# loadQscale[0].outserv = 1
+# load[6].plini = loads[ind][1]
+# load[6].qlini = loads[ind][2]
+# except:
+# pass
+ scenario_beforeUC.Save()
+
+ #scenario_beforeUC.Deactivate()
+
+ #gen_UC_list = []
+ for item in plants:
+ bus = item[0]
+ status = item[1]
+ _id = item[2]
+ pgen = item[3]
+ pmax = item[6]
+ try: #will only work for synchronous machines
+ pdispatch = item[11].ictpg
+ except:
+ pdispatch=0
+ if int(pdispatch)==1 and (abs(pgen) <= pmax * 0.02): # if generates at less than 2% of Pmax
+ #if (abs(pgen) <= pmax * 0.02):
+ if status == 0:
+ if not gen_UC_list: #len(gen_UC_list)==0:
+ app.SaveAsScenario('Case_' + str(nn), 1) # create a scenario to save the generator availabilities
+ scenario_UC = app.GetActiveScenario()
+ # disconnect the plant
+ for plant in plants_base: # find the object representing this generator
+ if (plant[0] == bus) and (plant[2] == _id) and (
+ plant[11].ip_ctrl != 1): #and plant[11].ictpg==1: # not the reference bus
+ plant[11].outserv = 1 # take the unit out of service
+ outs = plant[11].GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ outs[0].outserv = 1 # disable the outserv trigger to make sure the unit stays off
+ gen_UC_list.append((bus, _id))
+
+ if gen_UC_list: #len(gen_UC_list)!=0:
+ scenario_UC.Save()
+ app.SaveAsScenario('tempUC0_'+str(nn), 1) # create a scenario to save the base case
+ scenario_temporaireUC0=app.GetActiveScenario()
+ scenario_temporaireUC0.Save()
+ scenario_temporaireUC0.Deactivate()
+# scenario_temporaireUC0 = scenarioUC
+
+ #scenario_temporaireUC0=app.GetActiveScenario()
+ #scenario_temporaireUC0.Save()
+ #scenario_temporaireUC0.Deactivate()
+ #scenario_temporaireUC0=scenario_UC
+
+ # 3. Build the output vector Y
+ # sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z = [0] * 13
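+ # Legend of the z summary vector filled below: z[0] lines >100% loaded,
+ # z[1] 2w+3w transformers >100%, z[2] buses outside voltage limits,
+ # z[3] total generation (MW), z[4] total consumption incl. motors (MW),
+ # z[5] losses (%), z[6] max line loading (%), z[7] max transformer loading (%),
+ # z[8] lines loaded 90-100%, z[9] transformers loaded 90-100%,
+ # z[10] added fixed shunt (indicFS), z[11] load shed (indicLS), z[12] list of disconnected units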
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 100:
+ z[0] += 1 # Number of lines above 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 100:
+ z[1] += 1 # Number of transformers above 100% of their limits
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 100:
+ z[1] += 1 # Add number of 3w transformers above 100% of their limits
+ for i in range(sizeY1):
+ if buses[i][2] > buses[i][5]:
+ z[2] += 1
+ if buses[i][2] < buses[i][4]:
+ z[2] += 1 # Number of buses outside of their voltage limits
+ for i in range(sizeY0):
+ z[3] += float(plants[i][3]) # Total active production
+ for i in range(sizeY3):
+ z[4] += float(loads[i][1]) # Total active consumption
+ for i in range(sizeY6):
+ z[4] += float(motors[i][1]) # add total active consumption from motors
+ z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > z[6]:
+ z[6] = lines[i][rate_mat_index] # Max flow in lines
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > z[7]:
+ z[7] = transf[i][rate_mat_index] # Max flow in transformers
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > z[7]:
+ z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 90:
+ z[8] += 1
+ z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 90:
+ z[9] += 1
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 90:
+ z[9] += 1
+ z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10] = indicFS
+ z[11] = indicLS
+ z[12] = str(gen_UC_list)
+
+ # Creates the output vectors
+ for Pmach in range(sizeY0):
+ y[Pmach] = float(plants[Pmach][3])
+ Ymac[Pmach] = float(plants[Pmach][3])
+ for Qmach in range(sizeY0):
+ y[Qmach + sizeY0] = float(plants[Qmach][4])
+ for Vbus in range(sizeY1):
+ y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
+ for Iline in range(sizeY2):
+ y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
+ for Pline in range(sizeY2):
+ y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
+ for Qline in range(sizeY2):
+ y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
+ for Itrans in range(sizeY5):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
+ for Ptrans in range(sizeY5):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
+ for Qtrans in range(sizeY5):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
+ for Itrans in range(sizeY7):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(
+ transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range(sizeY7):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
+ for Qtrans in range(sizeY7):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
+ for Pload in range(sizeY3):
+ y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
+ for Pmotor in range(sizeY6):
+ y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
+ motors[Pmotor][1])
+ for Qmotor in range(sizeY6):
+ y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
+ motors[Qmotor][2])
+ for Qshunt in range(sizeY4):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
+ shunt[Qshunt][4])
+ for Qshunt in range(sizeY8):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
+ swshunt[Qshunt][4])
+
+ # nz = len(z)
+ #scenario_temporaireUC.Deactivate()
+ #scenario_temporaireUC.Delete()
+
+ res_beforeUC = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
+ list(fxshnt)] # save the result in a pickle file
+ with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.before', 'wb') as fichier:
+ mon_pickler = pickle.Pickler(fichier, protocol=2)
+ mon_pickler.dump(res_beforeUC)
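+ # The '.before' pickle can be read back for inspection, e.g.:
+ # with open(case_path + '.before', 'rb') as f: # case_path: hypothetical path to the saved case
+ # y, z, Ymac, indicLS, indicFS, loadShed, fxshnt = pickle.load(f)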
+
+
+ if len(gen_UC_list) == 0:
+ del z[-1]
+ #change scenario name
+ scenario_beforeUCpost=app.GetActiveScenario()
+ app.SaveAsScenario('Case_' + str(nn), 1) # create a scenario to save the base case
+ #scenario_beforeUCpost.Save()
+ scenario_beforeUC.Delete()
+
+ #also copy the markers of cases that converged only with a non-cost objective to the post-UC stage, because no additional treatment will be done.
+ for filename in os.listdir(os.path.dirname(os.path.realpath(__file__))):
+ #for filename in os.listdir(tempdir):
+ if filename.endswith('.shdUC'):
+ #filew = open(os.path.dirname(os.path.realpath(__file__)) + filename + 'UC','w')
+ shutil.copy2(os.path.join(os.path.dirname(os.path.realpath(__file__)), filename), os.path.join(os.path.dirname(os.path.realpath(__file__)),filename[0:-2]))
+ #shutil.copy2(os.path.join(tempdir, filename), os.path.join(tempdir,filename[0:-2] ))
+ #filew.close()
+
+ #----------------------------------RE-run after unit commitment step--------------------------------------------------
+ if len(gen_UC_list)!=0:
+
+ scenario_UC.Activate()
+
+ opf = app.GetFromStudyCase('ComOpf')
+
+ opf.iInit = 0
+ erropf = opf.Execute()
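+ # Fallback strategy when the OPF does not converge: re-initialise from a
+ # load flow and retry the 'cst' (cost) objective; if that fails, switch to
+ # 'los' (loss minimisation) and, when load shedding is allowed, to 'shd'
+ # (load shedding), restoring the saved temporary scenario before each attempt.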
+ # Specific treatment to solve cases that are difficult to converge
+ if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ ldf = app.GetFromStudyCase('ComLdf')
+ ldf.iopt_initOPF = 1 # use the load flow to initialise the OPF
+ ldf.Execute()
+ opf.iInit = 1
+ erropf = opf.Execute() # run the OPF with the 'cst' objective
+ print(' Run LDF for OPF ')
+ if erropf == 0: print(' OK thanks to the initial LDF ')
+ else:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ aa = 0
+ while erropf == 1: # if 'cst' does not work
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'los' # objective function = minimisation of the total network losses
+ erropf = opf.Execute() # run OPF 'los'
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF loss ! ! ! ')
+ opf.iInit = 0 # flat start for OPF 'los'
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF loss OK ')
+ if erropf == 0: # if 'los' works
+ if (aa == 2) and (LS_allowed):
+ opf.iopt_obj = 'shd'
+ opf.Execute()
+ if aa == 3:
+ # print(' ++++++++++++++++++++++++++++keep the OPF LOSS result')
+ # erropf = 1
+ # scenario_temporaire0.Apply(0) # restore the initial scenario
+
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if (aa == 2) and (LS_allowed):
+ print(' ==================== switch los-shd')
+ else:
+ print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
+ if (erropf == 1) and (LS_allowed):
+ aa = 0
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ ldf.Execute() # initial values for OPF 'shd'
+ # opf.iInit = 1
+ while erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'shd' # objective function = minimisation of load shedding
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF shd ! ! ! 222 ')
+ opf.iInit = 0
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF SHD ')
+ if erropf == 0: # if 'shd' works
+ if aa == 2:
+ opf.iopt_obj = 'los'
+ opf.Execute()
+ if aa == 3:
+ print(' +++++++++++++++++++++++++keep the OPF SHD result')
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if aa == 2:
+ print('=== ========== switch shd-los')
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shd', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+ else:
+ print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+
+ # End of the specific treatment to solve cases that are difficult to converge
+
+ loadShed = [[], [], [], [], []]
+ fxshnt = [[], [], []]
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+ ok = (erropf == 0)
+
+ if ok:
+
+ all_inputs = read_pfd(app, prj.loc_name, recal=0)
+ stop = time.clock();
+ start = stop; # ++++++++++++++++
+ buses = [row[0:8] for row in all_inputs[0]]
+ lines = [row[0:11] for row in all_inputs[1]]
+ transf = [row[0:11] for row in all_inputs[2]]
+ plants = [row[0:11] for row in all_inputs[3]]
+ loads = [row[0:7] for row in all_inputs[4]]
+ shunt = [row[0:7] for row in all_inputs[5]]
+ motors = [row[0:6] for row in all_inputs[6]]
+ transf3 = [row[0:14] for row in all_inputs[7]]
+ swshunt = [row[0:6] for row in all_inputs[8]]
+
+ # Extraction of the load shedding quantities
+
+
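+ # Load shedding appears as a reduced load scaling factor ('s:scale');
+ # the shed power is therefore (1 - scale) * the original P setpoint ('s:pini_set').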
+ for ii in range(len(loads)):
+ LSscale = loads[ii][6].GetAttribute('s:scale')
+ P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
+ LS = (1-LSscale) * P_setpoint
+ if abs(LS)>0.1:
+ indexLS.append(ii)
+ flagLS = 1 # raise flag loadshedding
+ loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
+ loadShed[1].append(loads[ii][0]) #busnumber
+ loadShed[2].append(loads[ii][4]) #busname
+ loadShed[3].append(LS)
+ loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
+
+
+# if abs(loads[ii][1] - loads_base[ii][1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
+# indexLS.append(ii)
+# flagLS = 1 # raise flag loadshedding
+#
+# loadShed[0].append( nn) # Position seems to correspond to the number of the case we are treating
+# # loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
+# loadShed[1].append(loads[ii][0])
+# loadShed[2].append(loads[ii][4])
+# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
+# loadShed[4].append(loads[ii][1])
+
+
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads
+ loadShed = list(zip(*loadShed)) # transpose the matrix
+
+ for ii in range(len(shunt)):
+ if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the active power (0.1 to eliminate calculation errors)
+ indexFS.append(ii)
+ flagFS = 1 # raise fixed-shunt flag
+ fxshnt[0].append(nn) # Position seems to correspond to the number of the case we are treating
+ # fxshnt[0].extend(['' for i in range(len(indexFS) - 1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
+ fxshnt[1].append(shunt[ii][0])
+ fxshnt[2].append(shunt[ii][2])
+ indicFS = sum(fxshnt[2]) # sum the effective changes
+ fxshnt = list(zip(*fxshnt)) # transpose the matrix
+
+ # 3. Display Y
+ # sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z = np.zeros(12) # np.zeros returns a new array of the given shape and type filled with zeros
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 100:
+ z[0] += 1 # Number of lines above 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 100:
+ z[1] += 1 # Number of transformers above 100% of their limits
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 100:
+ z[1] += 1 # Add number of 3w transformers above 100% of their limits
+ for i in range(sizeY1):
+ if buses[i][2] > buses[i][5]:
+ z[2] += 1
+ if buses[i][2] < buses[i][4]:
+ z[2] += 1 # Number of buses outside of their voltage limits
+ for i in range(sizeY0):
+ z[3] += float(plants[i][3]) # Total active production
+ for i in range(sizeY3):
+ z[4] += float(loads[i][1]) # Total active consumption
+ for i in range(sizeY6):
+ z[4] += float(motors[i][1]) # add total active consumption from motors
+ z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > z[6]:
+ z[6] = lines[i][rate_mat_index] # Max flow in lines
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > z[7]:
+ z[7] = transf[i][rate_mat_index] # Max flow in transformers
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > z[7]:
+ z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 90:
+ z[8] += 1
+ z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 90:
+ z[9] += 1
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 90:
+ z[9] += 1
+ z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10] = indicFS
+ z[11] = indicLS
+
+ # Creates the output vectors
+ for Pmach in range(sizeY0):
+ y[Pmach] = float(plants[Pmach][3])
+ Ymac[Pmach] = float(plants[Pmach][3])
+ for Qmach in range(sizeY0):
+ y[Qmach + sizeY0] = float(plants[Qmach][4])
+ for Vbus in range(sizeY1):
+ y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
+ for Iline in range(sizeY2):
+ y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
+ for Pline in range(sizeY2):
+ y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
+ for Qline in range(sizeY2):
+ y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
+ for Itrans in range(sizeY5):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
+ for Ptrans in range(sizeY5):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
+ for Qtrans in range(sizeY5):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
+ for Itrans in range(sizeY7):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range(sizeY7):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
+ for Qtrans in range(sizeY7):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
+ for Pload in range(sizeY3):
+ y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
+ for Pmotor in range(sizeY6):
+ y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
+ motors[Pmotor][1])
+ for Qmotor in range(sizeY6):
+ y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
+ motors[Qmotor][2])
+ for Qshunt in range(sizeY4):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
+ shunt[Qshunt][4])
+ for Qshunt in range(sizeY8):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
+ swshunt[Qshunt][4])
+
+ #save OPF results in after UC scenario
+ saveOPFresults(plants)
+# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
+# # for ind in indexLS: # only act on loads that have been shed
+# # load = loads_base[ind]
+# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
+# for ind,load in enumerate(loads_base):
+# try: #disactivate triggers, save results
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 1
+# loadQscale[0].outserv = 1
+# load[6].plini = loads[ind][1]
+# load[6].qlini = loads[ind][2]
+# except:
+# pass
+# pass
+ scenario_UC.Save()
+ scenario_temporaireUC0.Delete()
+
+ #scenario_temporaire.Deactivate()
+ #scenario_temporaire.Delete()
+
+
+
+
+if (not dico['UnitCommitment']): # or (dico['UnitCommitment'] and len(gen_UC_list) != 0): # if (no unit commitment) or (with UC and at least one unit disconnected)
+
+
+ #scenario_temporaire0.Activate() #scenario de base
+
+ app.SaveAsScenario('Case_' + str(nn), 1) # create a scenario to save the base case
+ scenario = app.GetActiveScenario()
+ scenario.Activate()
+
+
+
+ opf = app.GetFromStudyCase('ComOpf')
+
+ opf.iInit = 0
+
+
+ erropf = opf.Execute()
+ # Specific treatment to solve cases that are difficult to converge
+ if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ ldf = app.GetFromStudyCase('ComLdf')
+ ldf.iopt_initOPF = 1 # use the load flow to initialise the OPF
+ ldf.Execute()
+ opf.iInit = 1
+ erropf = opf.Execute() # run the OPF with the 'cst' objective
+ print(' Run LDF for OPF ')
+ if erropf == 0: print(' OK thanks to the initial LDF ')
+ else:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa = 0
+ while erropf == 1: # if 'cst' does not work
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'los' # objective function = minimisation of the total network losses
+ erropf = opf.Execute() # run OPF 'los'
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF loss ! ! ! ')
+ opf.iInit = 0 # flat start for OPF 'los'
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF loss OK ')
+ if erropf == 0: # if 'los' works
+ if (aa == 2) and (LS_allowed):
+ opf.iopt_obj = 'shd'
+ opf.Execute()
+ if aa == 3:
+ # print(' ++++++++++++++++++++++++++++keep the OPF LOSS result')
+ # erropf = 1
+ # scenario_temporaire0.Apply(0) # restore the initial scenario
+
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if (aa == 2) and (LS_allowed):
+ print(' ==================== switch los-shd')
+ else:
+ print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
+ if (erropf == 1) and (LS_allowed):
+ aa = 0
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ ldf.Execute() # initial values for OPF 'shd'
+ # opf.iInit = 1
+ while erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'shd' # objective function = minimisation of load shedding
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF shd ! ! ! 222 ')
+ opf.iInit = 0
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF SHD ')
+ if erropf == 0: # if 'shd' works
+ if aa == 2:
+ opf.iopt_obj = 'los'
+ opf.Execute()
+ if aa == 3:
+ print(' +++++++++++++++++++++++++keep the OPF SHD result')
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if aa == 2:
+ print('=== ========== switch shd-los')
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shd', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+ else:
+ print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+
+ # End of the specific treatment to solve cases that are difficult to converge
+
+ loadShed = [[], [], [], [], []]
+ fxshnt = [[], [], []]
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+ ok = (erropf == 0)
+
+ if ok:
+
+ all_inputs = read_pfd(app, prj.loc_name, recal=0)
+ stop = time.clock();
+ start = stop; # ++++++++++++++++
+ buses = [row[0:8] for row in all_inputs[0]]
+ lines = [row[0:11] for row in all_inputs[1]]
+ transf = [row[0:11] for row in all_inputs[2]]
+ plants = [row[0:11] for row in all_inputs[3]]
+ loads = [row[0:7] for row in all_inputs[4]]
+ shunt = [row[0:7] for row in all_inputs[5]]
+ motors = [row[0:6] for row in all_inputs[6]]
+ transf3 = [row[0:14] for row in all_inputs[7]]
+ swshunt = [row[0:6] for row in all_inputs[8]]
+
+ # Extraction of the load shedding quantities
+ for ii in range(len(loads)):
+
+ LSscale = loads[ii][6].GetAttribute('s:scale')
+ P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
+ LS = (1-LSscale) * P_setpoint
+ if abs(LS)>0.1:
+ indexLS.append(ii)
+ flagLS = 1 # raise flag loadshedding
+ loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
+ loadShed[1].append(loads[ii][0]) #busnumber
+ loadShed[2].append(loads[ii][4]) #busname
+ loadShed[3].append(LS)
+ loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
+
+
+# if abs(loads[ii][1] - loads_base[ii][1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
+# indexLS.append(ii)
+# flagLS = 1 # raise flag loadshedding
+#
+# loadShed[0].append( nn) # Position seems to correspond to the number of the case we are treating
+# # loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
+# loadShed[1].append(loads[ii][0])
+# loadShed[2].append(loads[ii][4])
+# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
+# loadShed[4].append(loads[ii][1])
+
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads
+ loadShed = list(zip(*loadShed)) # transpose the matrix
+
+ for ii in range(len(shunt)):
+ if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the active power (0.1 to eliminate calculation errors)
+ indexFS.append(ii)
+ flagFS = 1 # raise fixed-shunt flag
+ fxshnt[0].append(nn) # Position seems to correspond to the number of the case we are treating
+ # fxshnt[0].extend(['' for i in range(len(indexFS) - 1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
+ fxshnt[1].append(shunt[ii][0])
+ fxshnt[2].append(shunt[ii][2])
+ indicFS = sum(fxshnt[2]) # sum the effective changes
+ fxshnt = list(zip(*fxshnt)) # transpose the matrix
+
+ # 3. Display Y
+ # sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z = np.zeros(12) # np.zeros returns a new array of the given shape and type filled with zeros
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 100:
+ z[0] += 1 # Number of lines above 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 100:
+ z[1] += 1 # Number of transformers above 100% of their limits
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 100:
+ z[1] += 1 # Add number of 3w transformers above 100% of their limits
+ for i in range(sizeY1):
+ if buses[i][2] > buses[i][5]:
+ z[2] += 1
+ if buses[i][2] < buses[i][4]:
+ z[2] += 1 # Number of buses outside of their voltage limits
+ for i in range(sizeY0):
+ z[3] += float(plants[i][3]) # Total active production
+ for i in range(sizeY3):
+ z[4] += float(loads[i][1]) # Total active consumption
+ for i in range(sizeY6):
+ z[4] += float(motors[i][1]) # add total active consumption from motors
+ z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > z[6]:
+ z[6] = lines[i][rate_mat_index] # Max flow in lines
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > z[7]:
+ z[7] = transf[i][rate_mat_index] # Max flow in transformers
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > z[7]:
+ z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 90:
+ z[8] += 1
+ z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 90:
+ z[9] += 1
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 90:
+ z[9] += 1
+ z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10] = indicFS
+ z[11] = indicLS
+
+ # Creates the output vectors
+ for Pmach in range(sizeY0):
+ y[Pmach] = float(plants[Pmach][3])
+ Ymac[Pmach] = float(plants[Pmach][3])
+ for Qmach in range(sizeY0):
+ y[Qmach + sizeY0] = float(plants[Qmach][4])
+ for Vbus in range(sizeY1):
+ y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
+ for Iline in range(sizeY2):
+ y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
+ for Pline in range(sizeY2):
+ y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
+ for Qline in range(sizeY2):
+ y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
+ for Itrans in range(sizeY5):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
+ for Ptrans in range(sizeY5):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
+ for Qtrans in range(sizeY5):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
+ for Itrans in range(sizeY7):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range(sizeY7):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
+ for Qtrans in range(sizeY7):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
+ for Pload in range(sizeY3):
+ y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
+ for Pmotor in range(sizeY6):
+ y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
+ motors[Pmotor][1])
+ for Qmotor in range(sizeY6):
+ y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
+ motors[Qmotor][2])
+ for Qshunt in range(sizeY4):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
+ shunt[Qshunt][4])
+ for Qshunt in range(sizeY8):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
+ swshunt[Qshunt][4])
+
+ saveOPFresults(plants)
+# if opf.iopt_obj=='shd': #and indicLS > 0.1*len(loads_base):
+## for ind in indexLS: # only act on loads that have been shed
+## load = loads_base[ind]
+## #if load[11].iShedding == 1: # if loadshedding allowed on the bus
+# for ind,load in enumerate(loads_base):
+# try: #disactivate triggers, save results
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 1
+# loadQscale[0].outserv = 1
+# load[6].plini = loads[ind][1]
+# load[6].qlini = loads[ind][2]
+# except:
+# pass
+
+ scenario.Save()
+ #scenario.Deactivate()
+
+
+scenario_temporaire0.Delete()
+
+
+res_final = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
+ list(fxshnt)] # save the result in a pickle file
+with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.final', 'wb') as fichier:
+ mon_pickler = pickle.Pickler(fichier, protocol=2)
+ mon_pickler.dump(res_final)
+
+stop = time.clock(); print(' run study cases in ' + str(round(stop - start, 3)) + ' seconds'); start = stop
--- /dev/null
+############################################################
+# Purpose of this module: run the OPF only for the study cases that the parallel computation Comtast.Execute() failed to simulate
+############################################################
+
+import time
+import PSENconfig # file with Eficas output dictionaries
+import os,sys,pickle
+import pdb
+# from support_functionsPF import *#Valentin
+from support_functionsPF import read_pfd,read_pfd_simple,np, config_contingency
+from math import *
+import shutil
+from comfile import saveOPFresults
+
+Debug = True
+if Debug:
+ sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])#Valentin
+ os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'#Valentin
+
+stop = time.clock(); start = stop;
+with open(os.path.dirname(os.path.realpath(__file__))+'/data_dico', 'rb') as fichier:
+ mon_depickler = pickle.Unpickler(fichier)
+ dico = mon_depickler.load()
+position = dico['position']
+LS_allowed=dico['PFParams']['LOAD_SHEDDING_ALLOWED']
+filer=open(os.path.dirname(os.path.realpath(__file__))+'/absence'+str(position)+'.txt','r')
+_cas=[]
+for line in filer:
+ line=line.replace('\n', '')
+ _cas.append(line)
+filer.close()
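+# This worker reads the shared 'data_dico' pickle written by the main script and
+# the 'absence<position>.txt' file listing the case numbers that were not
+# simulated by the parallel run; each listed case is re-run sequentially below.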
+
+##############################################################################
+import powerfactory
+app = powerfactory.GetApplication()
+user = app.GetCurrentUser()
+prjs = user.GetContents('*.IntPrj')
+prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
+prj = prjs[0]
+prj.Activate()
+#app.Show()
+
+all_inputs_base = read_pfd_simple(app, prj.loc_name)
+plants_base = all_inputs_base[0]
+loads_base = all_inputs_base[1]
+shunt_base = all_inputs_base[2]
+swshunt_base = all_inputs_base[3]
+
+
+for cas in _cas:
+ print('run studycase ' + cas)
+ case = prj.GetContents('Case_'+cas+'.IntCase', 1)[0]
+ case.Activate()
+ scenario_temporaire = app.GetActiveScenario()
+ if scenario_temporaire:
+ scenario_temporaire.Delete()
+ fScen = app.GetProjectFolder('scen') # folder containing the triggers
+ scen = fScen.GetChildren(1, 'Base.IntScenario', 1)[0]
+ scen.Activate()
+
+ app.SaveAsScenario('temp0_'+cas, 1) # create a scenario to save the base case
+ scenario_temporaire0 = app.GetActiveScenario()
+ scenario_temporaire0.Save()
+ scenario_temporaire0.Deactivate()
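+ # 'temp0_<cas>' stores the untouched starting point; it is re-applied with
+ # Apply(0) to restore the initial state after each failed OPF attempt.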
+
+ ##########################################################
+ nn = int(cas) # case number
+ settriger_iter = case.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
+ # settriger_iter.ftrigger = nn
+ start = time.clock();
+ # with open(os.path.dirname(os.path.realpath(__file__)) + '/data_dico', 'rb') as fichier:
+ # mon_depickler = pickle.Unpickler(fichier)
+ # dico = mon_depickler.load()
+
+ TStest = dico['TStest']
+ # position = dico['position']
+ PFParams = dico['PFParams']
+ sizeY0 = dico['sizeY0']
+ sizeY1 = dico['sizeY1']
+ sizeY2 = dico['sizeY2']
+ sizeY3 = dico['sizeY3']
+ sizeY4 = dico['sizeY4']
+ sizeY5 = dico['sizeY5']
+ sizeY6 = dico['sizeY6']
+ sizeY7 = dico['sizeY7']
+ sizeY8 = dico['sizeY8']
+ sizeY = dico['sizeY']
+ gen_UC_list = []
+ # if dico['PFParams']['I_MAX'] == 'RateA':
+ Irate_num = 1
+ # elif dico['PFParams']['I_MAX'] == 'RateB':
+ # Irate_num = 2
+ # elif dico['PFParams']['I_MAX'] == 'RateC':
+ # Irate_num = 3
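+ # Irate_num selects which rating (RateA/B/C) feeds rate_mat_index below;
+ # only RateA is used here.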
+ num_pac = dico['num_pac']
+ all_inputs_base = read_pfd_simple(app, prj.loc_name)
+ # buses_base = all_inputs_base[0]
+ # lines_base = all_inputs_base[1]
+ # transf_base = all_inputs_base[2]
+ plants_base = all_inputs_base[0]
+ loads_base = all_inputs_base[1]
+ shunt_base = all_inputs_base[2]
+ # motors_base = all_inputs_base[6]
+ # transf3_base = all_inputs_base[7]
+ swshunt_base = all_inputs_base[3]
+
+# #reactivate load triggers
+# for load in loads_base:
+# try: #re-activate triggers if exist and disactivated
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 0
+# loadQscale[0].outserv = 0
+# except:
+# pass
+#
+# #rerun in case triggers were disactivated
+# all_inputs_base = read_pfd_simple(app, prj.loc_name)
+# # buses_base = all_inputs_base[0]
+# # lines_base = all_inputs_base[1]
+# # transf_base = all_inputs_base[2]
+# plants_base = all_inputs_base[0]
+# loads_base = all_inputs_base[1]
+# shunt_base = all_inputs_base[2]
+# # motors_base = all_inputs_base[6]
+# # transf3_base = all_inputs_base[7]
+# swshunt_base = all_inputs_base[3]
+
+ # Total initial (fixed) shunt on buses
+ init_shunt = 0
+ for i in range(len(shunt_base)):
+ init_shunt += float(shunt_base[i][2])
+
+ if dico['UnitCommitment']:
+ app.SaveAsScenario('Case_' + cas + '_beforeUC', 1) # create a scenario to save the base case
+ scenario_beforeUC = app.GetActiveScenario()
+
+ opf = app.GetFromStudyCase('ComOpf')
+ erropf = opf.Execute() # run the OPF
+ # Specific treatment to solve cases that are difficult to converge
+ if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ ldf = app.GetFromStudyCase('ComLdf')
+ ldf.iopt_initOPF = 1 # use the load flow to initialise the OPF
+ ldf.Execute()
+ opf.iInit = 1
+ erropf = opf.Execute() # run the OPF with the 'cst' objective
+ print(' Run LDF for OPF ')
+ if erropf == 0: print(' OK thanks to the initial LDF ')
+ else:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa = 0
+ while erropf == 1: # if 'cst' does not work
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'los' # objective function = minimisation of the total network losses
+ erropf = opf.Execute() # run OPF 'los'
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF loss ! ! ! ')
+ opf.iInit = 0 # flat start for OPF 'los'
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF loss ')
+ if erropf == 0: # if 'los' works
+ if (aa == 2) and (LS_allowed):
+ opf.iopt_obj = 'shd'
+ opf.Execute()
+ if aa == 3:
+ # print(' ++++++++++++++++++++++++++++keep the OPF LOSS result')
+ # erropf = 1
+ # scenario_temporaire0.Apply(0) # restore the initial scenario
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shdUC', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn)+'_LOSS' + '.shdUC','w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if (aa == 2) and (LS_allowed):
+ print(' ==================== switch los-shd')
+ else:
+ print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
+ if (erropf == 1) and (LS_allowed):
+ aa = 0
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ ldf.Execute() # initial values for OPF 'shd'
+ # opf.iInit = 1
+ while erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'shd' # objective function = minimisation of load shedding
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF shd ! ! ! 222 ')
+ opf.iInit = 0
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaire0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF SHD ')
+ if erropf == 0: # if 'shd' works
+ if aa == 2:
+ opf.iopt_obj = 'los'
+ opf.Execute()
+ if aa == 3:
+ print(' +++++++++++++++++++++++++keep the OPF SHD result')
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shdUC', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shdUC','w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if aa == 2:
+ print('=== ========== switch shd-los')
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shdUC', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+ else:
+ print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shdUC','w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+
+
+ loadShed = [[], [], [], [], []]
+ fxshnt = [[], [], []]
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+ ok = (erropf == 0)
+
+ if ok:
+
+ all_inputs = read_pfd(app, prj.loc_name, recal=0)
+
+ # start = stop; # ++++++++++++++++
+ buses = [row[0:8] for row in all_inputs[0]]
+ lines = [row[0:11] for row in all_inputs[1]]
+ transf = [row[0:11] for row in all_inputs[2]]
+ plants = [row[0:12] for row in all_inputs[3]]
+ loads = [row[0:7] for row in all_inputs[4]]
+ shunt = [row[0:7] for row in all_inputs[5]]
+ motors = [row[0:6] for row in all_inputs[6]]
+ transf3 = [row[0:14] for row in all_inputs[7]]
+ swshunt = [row[0:6] for row in all_inputs[8]]
+
+ # Extraction of the load shedding quantities
+ for ii in range(len(loads)):
+
+ LSscale = loads[ii][6].GetAttribute('s:scale')
+ P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
+ LS = (1-LSscale) * P_setpoint
+ if abs(LS)>0.1:
+ indexLS.append(ii)
+ flagLS = 1 # raise flag loadshedding
+ loadShed[0].append(position) # Position seems to correspond to the number of the case we are treating
+ loadShed[1].append(loads[ii][0]) #busnumber
+ loadShed[2].append(loads[ii][4]) #busname
+ loadShed[3].append(LS)
+ loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
+
+# if (loads[ii][1] - loads_base[ii][
+# 1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
+# indexLS.append(ii)
+# flagLS = 1 # raise flag loadshedding
+#
+# loadShed[0].append(
+# position) # Position seems to correspond to the number of the case we are treating
+# loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
+# loadShed[1].append(loads[ii][0])
+# loadShed[2].append(loads[ii][4])
+# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
+# loadShed[4].append(loads[ii][1])
+
+
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads
+ loadShed = list(zip(*loadShed)) # transpose the matrix
+
+ for ii in range(len(shunt)):
+ if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the active power (0.1 to eliminate calculation errors)
+ indexFS.append(ii)
+ flagFS = 1 # raise fixed-shunt flag
+ fxshnt[0].append(position) # Position seems to correspond to the number of the case we are treating
+ fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])
+ fxshnt[1].append(shunt[ii][0])
+ fxshnt[2].append(shunt[ii][2])
+ indicFS = sum(fxshnt[2]) # sum the effective changes
+ fxshnt = list(zip(*fxshnt)) # transpose the matrix
+
+ #save OPF results in study case before disconnecting gens
+ saveOPFresults(plants)
+# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
+# # for ind in indexLS: # only act on loads that have been shed
+# # load = loads_base[ind]
+# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
+# for ind,load in enumerate(loads_base):
+# try: #disactivate triggers, save results
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 1
+# loadQscale[0].outserv = 1
+# load[6].plini = loads[ind][1]
+# load[6].qlini = loads[ind][2]
+# except:
+# pass
+ scenario_beforeUC.Save()
+
+ #scenario_beforeUC.Deactivate()
+
+
+ #gen_UC_list = []
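+ # Unit-commitment heuristic: any dispatchable unit (ictpg == 1) with status 0
+ # whose output is at or below 2% of Pmax is taken out of service, and the case
+ # is then re-run. Machines without an ictpg attribute are skipped (pdispatch = 0).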
+ for item in plants:
+ bus = item[0]
+ status = item[1]
+ _id = item[2]
+ pgen = item[3]
+ pmax = item[6]
+ try: #will only work for synchronous machines
+ pdispatch = item[11].ictpg
+ except:
+ pdispatch=0
+ if int(pdispatch)==1 and (abs(pgen) <= pmax * 0.02): # if generates at less than 2% of Pmax
+ #if (abs(pgen) <= pmax * 0.02):
+ if status == 0:
+ if not gen_UC_list: #len(gen_UC_list)==0:
+ app.SaveAsScenario('Case_' + str(nn), 1) # create a scenario to save the generator availabilities
+ scenario_UC = app.GetActiveScenario()
+ # disconnect the plant
+ for plant in plants_base: # find the object representing the generator
+ if (plant[0] == bus) and (plant[2] == _id) and (
+ plant[11].ip_ctrl != 1): #and plant[11].ictpg==1: # not reference bus
+ plant[11].outserv = 1 # deactivate the unit
+ outs = plant[11].GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ outs[0].outserv = 1 # deactivate the outserv trigger to make sure the unit is switched off
+ gen_UC_list.append((bus, _id))
+
+ if gen_UC_list: #len(gen_UC_list)!=0:
+ scenario_UC.Save()
+ app.SaveAsScenario('tempUC0_'+cas, 1) # create a scenario to save the base case
+ scenario_temporaireUC0=app.GetActiveScenario()
+ scenario_temporaireUC0.Save()
+ scenario_temporaireUC0.Deactivate()
+# scenario_temporaireUC0 = scenarioUC
+
+ # 3. Display Y
+ # sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z = [0] * 13
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 100:
+ z[0] += 1 # Number of lines above 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 100:
+ z[1] += 1 # Number of transformers above 100% of their limits
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 100:
+ z[1] += 1 # Add number of 3w transformers above 100% of their limits
+ for i in range(sizeY1):
+ if buses[i][2] > buses[i][5]:
+ z[2] += 1
+ if buses[i][2] < buses[i][4]:
+ z[2] += 1 # Number of buses outside of their voltage limits
+ for i in range(sizeY0):
+ z[3] += float(plants[i][3]) # Total active production
+ for i in range(sizeY3):
+ z[4] += float(loads[i][1]) # Total active consumption
+ for i in range(sizeY6):
+ z[4] += float(motors[i][1]) # add total active consumption from motors
+ z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > z[6]:
+ z[6] = lines[i][rate_mat_index] # Max flow in lines
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > z[7]:
+ z[7] = transf[i][rate_mat_index] # Max flow in transformers
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > z[7]:
+ z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 90:
+ z[8] += 1
+ z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 90:
+ z[9] += 1
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 90:
+ z[9] += 1
+ z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10] = indicFS
+ z[11] = indicLS
+ z[12] = str(gen_UC_list)
+
+ # Creates the output vectors
+ for Pmach in range(sizeY0):
+ y[Pmach] = float(plants[Pmach][3])
+ Ymac[Pmach] = float(plants[Pmach][3])
+ for Qmach in range(sizeY0):
+ y[Qmach + sizeY0] = float(plants[Qmach][4])
+ for Vbus in range(sizeY1):
+ y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
+ for Iline in range(sizeY2):
+ y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
+ for Pline in range(sizeY2):
+ y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
+ for Qline in range(sizeY2):
+ y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
+ for Itrans in range(sizeY5):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
+ for Ptrans in range(sizeY5):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
+ for Qtrans in range(sizeY5):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
+ for Itrans in range(sizeY7):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(
+ transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range(sizeY7):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
+ for Qtrans in range(sizeY7):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
+ for Pload in range(sizeY3):
+ y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
+ for Pmotor in range(sizeY6):
+ y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
+ motors[Pmotor][1])
+ for Qmotor in range(sizeY6):
+ y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
+ motors[Qmotor][2])
+ for Qshunt in range(sizeY4):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
+ shunt[Qshunt][4])
+ for Qshunt in range(sizeY8):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
+ swshunt[Qshunt][4])
+
+ # nz = len(z)
+ #scenario_temporaireUC.Deactivate()
+ #scenario_temporaireUC.Delete()
+
+ res_beforeUC = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
+ list(fxshnt)] # save the result in a pickle file
+
+ with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.before', 'wb') as fichier:
+ mon_pickler = pickle.Pickler(fichier, protocol=2)
+ mon_pickler.dump(res_beforeUC)
+
+ if len(gen_UC_list) == 0:
+ del z[-1]
+ #change scenario name
+ scenario_beforeUCpost=app.GetActiveScenario()
+ app.SaveAsScenario('Case_' + str(nn), 1) # create a scenario to save the base case
+ #scenario_beforeUCpost.Save()
+ scenario_beforeUC.Delete()
+
+
+ #also copy the markers of cases that converged only with a non-cost objective to the post-UC stage, because no additional treatment was done.
+ for filename in os.listdir(os.path.dirname(os.path.realpath(__file__))):
+ #for filename in os.listdir(tempdir):
+ if filename.endswith('.shdUC'):
+ #filew = open(os.path.dirname(os.path.realpath(__file__)) + filename + 'UC','w')
+ shutil.copy2(os.path.join(os.path.dirname(os.path.realpath(__file__)), filename), os.path.join(os.path.dirname(os.path.realpath(__file__)),filename[0:-2]))
+ #shutil.copy2(os.path.join(tempdir, filename), os.path.join(tempdir,filename[0:-2]))
+ #filew.close()
+
+ #----------------------------------RE-run after unit commitment step--------------------------------------------------
+ if len(gen_UC_list)!=0:
+
+ #scenario_temporaire0.Activate()
+
+ #scenario_temporaire0.Apply(0)
+ #scenario_UC.Apply(0)
+ scenario_UC.Activate()
+
+ #app.SaveAsScenario('temp' + cas, 1) # creer scenario pour sauvegarder le cas de base
+ #scenario_temporaire = app.GetActiveScenario()
+ opf = app.GetFromStudyCase('ComOpf')
+
+ opf.iInit = 0
+ erropf = opf.Execute()
+ # Specific treatment to solve cases that are difficult to converge
+ if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ ldf = app.GetFromStudyCase('ComLdf')
+ ldf.iopt_initOPF = 1 # use the load flow to initialise the OPF
+ ldf.Execute()
+ opf.iInit = 1
+ erropf = opf.Execute() # run the OPF with the 'cst' objective
+ print(' Run LDF for OPF ')
+ if erropf == 0: print(' OK thanks to the initial LDF ')
+ else:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ aa = 0
+ while erropf == 1: # if 'cst' does not work
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'los' # objective function = minimisation of the total network losses
+ erropf = opf.Execute() # run OPF 'los'
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF loss ! ! ! ')
+ opf.iInit = 0 # flat start for OPF 'los'
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF loss OK ')
+ if erropf == 0: # if 'los' works
+ if (aa == 2) and (LS_allowed):
+ opf.iopt_obj = 'shd'
+ opf.Execute()
+ if aa == 3:
+ # print(' ++++++++++++++++++++++++++++keep the OPF LOSS result')
+ # erropf = 1
+ # scenario_temporaire0.Apply(0) # restore the initial scenario
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
+ #filew = open(tempdir + '/Case_' + str(nn)+'_LOSS' + '.shd','w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if (aa == 2) and (LS_allowed):
+ print(' ==================== switch los-shd')
+ else:
+ print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
+ if (erropf == 1) and (LS_allowed):
+ aa = 0
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ ldf.Execute() # initial values for OPF 'shd'
+ # opf.iInit = 1
+ while erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ aa += 1
+ opf.iopt_obj = 'shd' # objective function = minimisation of load shedding
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ print(' flat-start to OPF shd ! ! ! 222 ')
+ opf.iInit = 0
+ erropf = opf.Execute()
+ if erropf == 1:
+ scenario_temporaireUC0.Apply(0) # restore the initial scenario
+ break
+ opf.iInit = 1
+ print(' Run OPF SHD ')
+ if erropf == 0: # if 'shd' works
+ if aa == 2:
+ opf.iopt_obj = 'los'
+ opf.Execute()
+ if aa == 3:
+ print(' +++++++++++++++++++++++++keep the OPF SHD result')
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd','w')
+ #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shd','w')
+ filew.write('Case_' + str(nn))
+ filew.close()
+ break
+ opf.iopt_obj = 'cst'
+ erropf = opf.Execute() # re-run OPF 'cst'
+ if erropf == 0:
+ if aa == 2:
+ print('=== ========== switch shd-los')
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shd', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+ else:
+ print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
+ # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
+ # filew.write('Case_' + str(nn))
+ # filew.close()
+ # End of the specific treatment to solve cases that are difficult to converge
+
+ loadShed = [[], [], [], [], []]
+ fxshnt = [[], [], []]
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+ ok = (erropf == 0)
+
+ if ok:
+
+ all_inputs = read_pfd(app, prj.loc_name, recal=0)
+ stop = time.clock();
+ start = stop; # ++++++++++++++++
+ buses = [row[0:8] for row in all_inputs[0]]
+ lines = [row[0:11] for row in all_inputs[1]]
+ transf = [row[0:11] for row in all_inputs[2]]
+ plants = [row[0:11] for row in all_inputs[3]]
+ loads = [row[0:7] for row in all_inputs[4]]
+ shunt = [row[0:7] for row in all_inputs[5]]
+ motors = [row[0:6] for row in all_inputs[6]]
+ transf3 = [row[0:14] for row in all_inputs[7]]
+ swshunt = [row[0:6] for row in all_inputs[8]]
+
+ # Extraction of the load shedding quantities
+ for ii in range(len(loads)):
+
+ LSscale = loads[ii][6].GetAttribute('s:scale')
+ P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
+ LS = (1-LSscale) * P_setpoint
+ if abs(LS)>0.1:
+ indexLS.append(ii)
+ flagLS = 1 # raise flag loadshedding
+ loadShed[0].append(position) # Position seems to correspond to the number of the case we are treating
+ loadShed[1].append(loads[ii][0]) #busnumber
+ loadShed[2].append(loads[ii][4]) #busname
+ loadShed[3].append(LS)
+ loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
+
+# if (loads[ii][1] - loads_base[ii][
+# 1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
+# indexLS.append(ii)
+# flagLS = 1 # raise flag loadshedding
+#
+# loadShed[0].append(
+# position) # Position seems to correspond to the number of the case we are treating
+# #loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
+# loadShed[1].append(loads[ii][0])
+# loadShed[2].append(loads[ii][4])
+# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
+# loadShed[4].append(loads[ii][1])
+
+
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads
+ loadShed = list(zip(*loadShed)) # transpose the matrix
+
+ for ii in range(len(shunt)):
+ if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the active power (0.1 to eliminate calculation errors)
+ indexFS.append(ii)
+ flagFS = 1 # raise fixed-shunt flag
+ fxshnt[0].append(position) # Position seems to correspond to the number of the case we are treating
+ fxshnt[0].extend(['' for i in range(len(indexFS) - 1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
+ fxshnt[1].append(shunt[ii][0])
+ fxshnt[2].append(shunt[ii][2])
+ indicFS = sum(fxshnt[2]) # sum the effective changes
+ fxshnt = list(zip(*fxshnt)) # transpose the matrix
+
+ # 3. Display Y
+ # sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z = np.zeros(12) # np.zeros returns a new array of the given shape and type filled with zeros
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 100:
+ z[0] += 1 # Number of lines above 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 100:
+ z[1] += 1 # Number of transformers above 100% of their limits
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 100:
+ z[1] += 1 # Add number of 3w transformers above 100% of their limits
+ for i in range(sizeY1):
+ if buses[i][2] > buses[i][5]:
+ z[2] += 1
+ if buses[i][2] < buses[i][4]:
+ z[2] += 1 # Number of buses outside of their voltage limits
+ for i in range(sizeY0):
+ z[3] += float(plants[i][3]) # Total active production
+ for i in range(sizeY3):
+ z[4] += float(loads[i][1]) # Total active consumption
+ for i in range(sizeY6):
+ z[4] += float(motors[i][1]) # add total active consumption from motors
+ z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > z[6]:
+ z[6] = lines[i][rate_mat_index] # Max flow in lines
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > z[7]:
+ z[7] = transf[i][rate_mat_index] # Max flow in transformers
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > z[7]:
+ z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 90:
+ z[8] += 1
+ z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 90:
+ z[9] += 1
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 90:
+ z[9] += 1
+ z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10] = indicFS
+ z[11] = indicLS
+
+ # Creates the output vectors
+ for Pmach in range(sizeY0):
+ y[Pmach] = float(plants[Pmach][3])
+ Ymac[Pmach] = float(plants[Pmach][3])
+ for Qmach in range(sizeY0):
+ y[Qmach + sizeY0] = float(plants[Qmach][4])
+ for Vbus in range(sizeY1):
+ y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
+ for Iline in range(sizeY2):
+ y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
+ for Pline in range(sizeY2):
+ y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
+ for Qline in range(sizeY2):
+ y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
+ for Itrans in range(sizeY5):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
+ for Ptrans in range(sizeY5):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
+ for Qtrans in range(sizeY5):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
+ for Itrans in range(sizeY7):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range(sizeY7):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
+ for Qtrans in range(sizeY7):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
+ for Pload in range(sizeY3):
+ y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
+ for Pmotor in range(sizeY6):
+ y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
+ motors[Pmotor][1])
+ for Qmotor in range(sizeY6):
+ y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
+ motors[Qmotor][2])
+ for Qshunt in range(sizeY4):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
+ shunt[Qshunt][4])
+ for Qshunt in range(sizeY8):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
+ swshunt[Qshunt][4])
+
+ saveOPFresults(plants)
+# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
+# # for ind in indexLS: # only act on loads that have been shed
+# # load = loads_base[ind]
+# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
+# for ind,load in enumerate(loads_base):
+# try: #disactivate triggers, save results
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 1
+# loadQscale[0].outserv = 1
+# load[6].plini = loads[ind][1]
+# load[6].qlini = loads[ind][2]
+# except:
+# pass
+#
+ scenario_UC.Save()
+ scenario_temporaireUC0.Delete()
+
+ if (not dico['UnitCommitment']): # or (dico['UnitCommitment'] and len(gen_UC_list) != 0): # if (no unit commitment) or (with UC and at least one unit disconnected)
+
+ #scenario_temporaire0.Activate()
+
+ #if len(gen_UC_list)!=0:# deja desactive au moin 1 generateur
+ # scenario_temporaire0.Activate()
+ #scenario_UC.Apply(0)
+
+ app.SaveAsScenario('Case_' + cas, 1) # create a scenario to save the base case
+ scenario = app.GetActiveScenario()
+ scenario.Activate()
+
+
+ opf = app.GetFromStudyCase('ComOpf')
+ opf.iInit = 0
+
+
+ erropf = opf.Execute()
+ # Traitement specifique pour resoudre des cas difficle a converger
+        if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
+            scenario_temporaire0.Apply(0)  # restore the initial scenario
+            ldf = app.GetFromStudyCase('ComLdf')
+            ldf.iopt_initOPF = 1  # use the load flow to initialize the OPF
+            ldf.Execute()
+            opf.iInit = 1
+            erropf = opf.Execute()  # run OPF with the 'cst' objective
+            print(' Run LDF for OPF ')
+            if erropf == 0: print(' OK thanks to initial LDF ')
+ else:
+            scenario_temporaire0.Apply(0)  # restore the initial scenario
+            aa = 0
+            while erropf == 1:  # if 'cst' does not work
+                scenario_temporaire0.Apply(0)  # restore the initial scenario
+                aa += 1
+                opf.iopt_obj = 'los'  # objective function = minimization of total network losses
+                erropf = opf.Execute()  # run OPF 'los'
+                if erropf == 1:
+                    scenario_temporaire0.Apply(0)  # restore the initial scenario
+                    print(' flat start for OPF loss ')
+                    opf.iInit = 0  # flat start for OPF loss
+                    erropf = opf.Execute()
+                    if erropf == 1:
+                        scenario_temporaire0.Apply(0)  # restore the initial scenario
+                        break
+                    opf.iInit = 1
+                    print(' Run OPF loss OK ')
+                if erropf == 0:  # if 'los' works
+                    if (aa == 2) and (LS_allowed):
+                        opf.iopt_obj = 'shd'
+                        opf.Execute()
+                    if aa == 3:
+                        # print(' ++++++++++++++++++++++++++++ take the OPF LOSS result')
+                        # erropf = 1
+                        # scenario_temporaire0.Apply(0)  # restore the initial scenario
+                        filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
+                        #filew = open(tempdir + '/Case_' + str(nn)+'_LOSS' + '.shd','w')
+                        filew.write('Case_' + str(nn))
+                        filew.close()
+                        break
+                    opf.iopt_obj = 'cst'
+                    erropf = opf.Execute()  # re-run OPF with 'cst'
+                    if erropf == 0:
+                        if (aa == 2) and (LS_allowed):
+                            print(' ==================== switched from los to shd')
+                        else:
+                            print(' OK thanks to OPF LOSS ======================= LOSS in case aa=' + str(aa))
+        if (erropf == 1) and (LS_allowed):
+            aa = 0
+            scenario_temporaire0.Apply(0)  # restore the initial scenario
+            ldf.Execute()  # initial values for OPF shd
+            # opf.iInit = 1
+            while erropf == 1:
+                scenario_temporaire0.Apply(0)  # restore the initial scenario
+                aa += 1
+                opf.iopt_obj = 'shd'  # objective function = minimization of the load shedding cost
+                erropf = opf.Execute()
+                if erropf == 1:
+                    scenario_temporaire0.Apply(0)  # restore the initial scenario
+                    print(' flat start for OPF shd ')
+                    opf.iInit = 0
+                    erropf = opf.Execute()
+                    if erropf == 1:
+                        scenario_temporaire0.Apply(0)  # restore the initial scenario
+                        break
+                    opf.iInit = 1
+                    print(' Run OPF SHD ')
+                if erropf == 0:  # if 'shd' works
+                    if aa == 2:
+                        opf.iopt_obj = 'los'
+                        opf.Execute()
+                    if aa == 3:
+                        print(' +++++++++++++++++++++++++ take the OPF SHD result')
+                        filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
+                        #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shd','w')
+                        filew.write('Case_' + str(nn))
+                        filew.close()
+                        break
+                    opf.iopt_obj = 'cst'
+                    erropf = opf.Execute()  # re-run OPF with 'cst'
+                    if erropf == 0:
+                        if aa == 2:
+                            print('=== ========== switched from shd to los')
+                            # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shd', 'w')
+                            # filew.write('Case_' + str(nn))
+                            # filew.close()
+                        else:
+                            print(' OK thanks to OPF SHD ------------------------------- Load SHEDDING in case aa=' + str(aa))
+                            # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
+                            # filew.write('Case_' + str(nn))
+                            # filew.close()
+        # End of the specific handling for cases that are difficult to converge
+
+ loadShed = [[], [], [], [], []]
+ fxshnt = [[], [], []]
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+        ok = (erropf == 0)
+
+        if ok:
+
+ all_inputs = read_pfd(app, prj.loc_name, recal=0)
+ stop = time.clock();
+ start = stop; # ++++++++++++++++
+            buses = [row[0:8] for row in all_inputs[0]]
+            lines = [row[0:11] for row in all_inputs[1]]
+            transf = [row[0:11] for row in all_inputs[2]]
+            plants = [row[0:11] for row in all_inputs[3]]
+            loads = [row[0:7] for row in all_inputs[4]]
+            shunt = [row[0:7] for row in all_inputs[5]]
+            motors = [row[0:6] for row in all_inputs[6]]
+            transf3 = [row[0:14] for row in all_inputs[7]]
+            swshunt = [row[0:6] for row in all_inputs[8]]
+
+ # Extraction of the load shedding quantities
+ for ii in range(len(loads)):
+
+ LSscale = loads[ii][6].GetAttribute('s:scale')
+ P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
+ LS = (1-LSscale) * P_setpoint
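+                # shed power = (1 - applied scale factor) x active power setpoint;
+                # the 0.1 threshold below filters out numerical noise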
+ if abs(LS)>0.1:
+ indexLS.append(ii)
+ flagLS = 1 # raise flag loadshedding
+ loadShed[0].append(position) # Position seems to correspond to the number of the case we are treating
+ loadShed[1].append(loads[ii][0]) #busnumber
+ loadShed[2].append(loads[ii][4]) #busname
+ loadShed[3].append(LS)
+ loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
+
+#                if (loads[ii][1] - loads_base[ii][1]) > 0.1:  # check the active power (0.1 to filter out numerical error)
+# indexLS.append(ii)
+# flagLS = 1 # raise flag loadshedding
+#
+# loadShed[0].append(
+# position) # Position seems to correspond to the number of the case we are treating
+# loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
+# loadShed[1].append(loads[ii][0])
+# loadShed[2].append(loads[ii][4])
+# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
+# loadShed[4].append(loads[ii][1])
+
+
+            indicLS = sum(loadShed[3])  # total shed active power
+            loadShed = list(zip(*loadShed))  # transpose the matrix
+
+
+
+            for ii in range(len(shunt)):
+                if (shunt[ii][1] - shunt_base[ii][1]) > 0.1:  # check the power change (0.1 to filter out numerical error)
+                    indexFS.append(ii)
+                    flagFS = 1  # raise fixed-shunt flag
+                    fxshnt[0].append(position)  # Position seems to correspond to the number of the case we are treating
+                    fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])  # why [0]? Maybe it would be better to have 2 lists? Or a dict?
+                    fxshnt[1].append(shunt[ii][0])
+                    fxshnt[2].append(shunt[ii][2])
+            indicFS = sum(fxshnt[2])  # total power of the activated fixed shunts
+            fxshnt = list(zip(*fxshnt))  # transpose the matrix
+
+            # 3. Build the output vectors
+ # sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z = np.zeros(12) # np.zeros returns a new array of the given shape and type filled with zeros
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
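+            # Layout of the indicator vector z (one scalar per quantity of interest):
+            #   z[0] lines > 100% loading           z[1] 2w+3w transformers > 100% loading
+            #   z[2] buses outside voltage limits   z[3] total active generation
+            #   z[4] total active consumption       z[5] active losses (% of generation)
+            #   z[6] max line loading (%)           z[7] max transformer loading (%)
+            #   z[8] lines in the 90-100% band      z[9] transformers in the 90-100% band
+            #   z[10] fixed-shunt indicator (indicFS)  z[11] total load shedding (indicLS)
+            # y stacks, in order: Pmach, Qmach, Vbus, then loading/P/Q per line, per 2w
+            # transformer and per 3w transformer, then Pload, Pmotor, Qmotor and shunt Q.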
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 100:
+ z[0] += 1 # Number of lines above 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 100:
+ z[1] += 1 # Number of transformers above 100% of their limits
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 100:
+ z[1] += 1 # Add number of 3w transformers above 100% of their limits
+ for i in range(sizeY1):
+ if buses[i][2] > buses[i][5]:
+ z[2] += 1
+ if buses[i][2] < buses[i][4]:
+ z[2] += 1 # Number of buses outside of their voltage limits
+ for i in range(sizeY0):
+ z[3] += float(plants[i][3]) # Total active production
+ for i in range(sizeY3):
+ z[4] += float(loads[i][1]) # Total active consumption
+ for i in range(sizeY6):
+ z[4] += float(motors[i][1]) # add total active consumption from motors
+                z[5] = (z[3] - z[4]) / z[3] * 100  # Active power losses (% of total generation)
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > z[6]:
+ z[6] = lines[i][rate_mat_index] # Max flow in lines
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > z[7]:
+ z[7] = transf[i][rate_mat_index] # Max flow in transformers
+                for i in range(sizeY7):
+                    if transf3[i][rate_mat_index_3w] > z[7]:
+                        z[7] = transf3[i][rate_mat_index_3w]  # Max flow in 3w transformers
+ for i in range(sizeY2):
+ if lines[i][rate_mat_index] > 90:
+ z[8] += 1
+ z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
+ for i in range(sizeY5):
+ if transf[i][rate_mat_index] > 90:
+ z[9] += 1
+ for i in range(sizeY7):
+ if transf3[i][rate_mat_index_3w] > 90:
+ z[9] += 1
+ z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10] = indicFS
+ z[11] = indicLS
+
+ # Creates the output vectors
+ for Pmach in range(sizeY0):
+ y[Pmach] = float(plants[Pmach][3])
+ Ymac[Pmach] = float(plants[Pmach][3])
+ for Qmach in range(sizeY0):
+ y[Qmach + sizeY0] = float(plants[Qmach][4])
+ for Vbus in range(sizeY1):
+ y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
+ for Iline in range(sizeY2):
+ y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
+ for Pline in range(sizeY2):
+ y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
+ for Qline in range(sizeY2):
+ y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
+ for Itrans in range(sizeY5):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
+ for Ptrans in range(sizeY5):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
+ for Qtrans in range(sizeY5):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
+ for Itrans in range(sizeY7):
+ y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range(sizeY7):
+ y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
+ for Qtrans in range(sizeY7):
+ y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
+ for Pload in range(sizeY3):
+ y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
+ for Pmotor in range(sizeY6):
+ y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
+ motors[Pmotor][1])
+ for Qmotor in range(sizeY6):
+ y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
+ motors[Qmotor][2])
+ for Qshunt in range(sizeY4):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
+ shunt[Qshunt][4])
+ for Qshunt in range(sizeY8):
+ y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
+ swshunt[Qshunt][4])
+
+ saveOPFresults(plants)
+# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
+# # for ind in indexLS: # only act on loads that have been shed
+# # load = loads_base[ind]
+# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
+# for ind,load in enumerate(loads_base):
+#            try: #deactivate triggers, save results
+# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
+# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
+# loadPscale[0].outserv = 1
+# loadQscale[0].outserv = 1
+# load[6].plini = loads[ind][1]
+# load[6].qlini = loads[ind][2]
+# except:
+# pass
+
+
+ scenario.Save()
+
+
+
+ # if len(gen_UC_list) == 0:
+ scenario_temporaire0.Delete()
+        res_final = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
+                     list(fxshnt)]  # save the result in a pickle file
+ with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.final', 'wb') as fichier:
+ mon_pickler = pickle.Pickler(fichier, protocol=2)
+ mon_pickler.dump(res_final)
+
+stop = time.clock(); print(' run ' + str(len(_cas)) + ' study cases in correct_comtask.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+# app.Show()
+# aa=1
--- /dev/null
+#-------------------------------------------------------------------------------
+# Name: module1
+# Purpose:
+#
+# Author: j15773
+#
+# Created: 09/06/2016
+# Copyright: (c) j15773 2016
+# Licence: <your licence>
+#-------------------------------------------------------------------------------
+
+import os
+import sys
+import numpy as np
+from support_functions import *
+
+NetworkFile=r"C:\Users\j15773\Documents\GTDosier\PSEN\Versions\PSEN_V13 - ec dispatch\Test Case ECD\JPS Network 2019 - half load.sav"
+PSSE_PATH=r"C:\Program Files (x86)\PTI\PSSE33\PSSBIN"
+ecd_file = r"C:\Users\j15773\Documents\GTDosier\PSEN\Versions\PSEN_V13 - ec dispatch\Test Case ECD\Jam19_ECD.ecd"
+
+sys.path.append(PSSE_PATH)
+os.environ['PATH'] += ';' + PSSE_PATH + ';'
+
+import psspy
+import redirect
+
+###initialization PSSE
+psspy.psseinit(10000)
+_i=psspy.getdefaultint()
+_f=psspy.getdefaultreal()
+_s=psspy.getdefaultchar()
+redirect.psse2py()
+
+# Silent execution of PSSe
+islct=6 # 6=no output; 1=standard
+psspy.progress_output(islct)
+
+def EconomicDispatch(NetworkFile, ecd_file, LossesRatio, TapChange):
+
+ #Network File
+ psspy.case(NetworkFile)
+ psspy.save(NetworkFile)
+
+ #read contents
+ all_inputs_base=read_sav(NetworkFile)
+ buses_base=all_inputs_base[0]
+ plants_base=all_inputs_base[3]
+ loads_base=all_inputs_base[4]
+ motors_base=all_inputs_base[6]
+
+ #TotalLoad
+ P_load = 0
+ for load in loads_base:
+ P_load += load[1]
+ for motor in motors_base:
+ P_load+= motor[1]
+
+ #total gen not in ecd file
+ f = open(ecd_file,'r')
+ ecd_lines = f.readlines()
+ ecd_genlist = []
+ for line in ecd_lines:
+ line = line.split('\t')
+ busnum = int(line[0])
+ genid = line[1].strip()
+ ecd_genlist.append((busnum,genid))
+ f.close()
+
+ P_nondisp = 0
+ P_disp = 0
+ for gen in plants_base:
+ busnum = gen[0]
+ genid = gen[2].strip()
+ pgen = gen[3]
+ if (busnum,genid) in ecd_genlist:
+ P_disp+=pgen
+ else:
+ P_nondisp+=pgen
+    print(P_disp)
+    print(P_nondisp)
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ ierr1 = psspy.ecdi(3,1,1,ecd_file,1,[0.0,0.0])
+ ierr2 = psspy.ecdi(3,1,2,ecd_file,1,[0.0,0.0])
+ ierr3 = psspy.ecdi(3,1,3,ecd_file,0,[P_load*(1+LossesRatio) - P_nondisp,0.0])
+ ierr4 = psspy.ecdi(3,1,4,ecd_file,1,[0.0,0.0])
+
+ EcdErrorCodes = [ierr1,ierr2,ierr3,ierr4]
+
+    # Newton-Raphson power flow calculation. Params:
+    # tap adjustment flag (0 = disable / 1 = enable stepping / 2 = enable direct)
+    # area interchange adjustment (0 = disable)
+    # phase shift adjustment (0 = disable)
+    # dc tap adjustment (1 = enable)
+    # switched shunt adjustment (1 = enable)
+    # flat start (0 = default / disabled, 1 = enabled); disabled because we are not starting from scratch
+    # var limit (default = 99, -1 = ignore limit, 0 = apply var limit immediately)
+    # non-divergent solution (0 = disable)
+ psspy.fnsl([TapChange, _i, _i, _i, _i, _i, _i,_i]) # Load flow Newton Raphson
+ LFcode = psspy.solved()
+
+ #check to see if swing bus outside limits
+ Plimit = False
+ Qlimit = False
+ for bus in buses_base:
+ bustype = int(bus[6])
+ if bustype==3: #swing bus
+ swingbusnum = int(bus[0])
+ for gen in plants_base:
+ busnum = gen[0]
+ if busnum == swingbusnum:
+ machid = gen[2]
+ pmax = gen[6]
+ qmax = gen[7]
+ pmin = gen[9]
+ qmin = gen[10]
+ ierr, pgen = psspy.macdat(busnum,machid,'P')
+ ierr, qgen = psspy.macdat(busnum,machid,'Q')
+ if pgen > pmax or pgen < pmin:
+ Plimit = True
+ if qgen > qmax or qgen < qmin:
+ Qlimit = True
+ psspy.save(NetworkFile)
+ return EcdErrorCodes, LFcode, Plimit, Qlimit
+
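+# EconomicDispatch returns:
+#   EcdErrorCodes - the four psspy.ecdi() error codes (0 means success)
+#   LFcode        - psspy.solved() status of the Newton-Raphson load flow (0 = converged)
+#   Plimit/Qlimit - True if the swing machine ends up outside its P or Q limits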
+EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(NetworkFile, ecd_file, 0.026, 1)
--- /dev/null
+import PSENconfig # file with Eficas output dictionaries
+import os,sys,pickle
+# from support_functionsPF import *#Valentin
+from support_functionsPF import read_pfd,np
+# sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])#Valentin
+# os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'#Valentin
+
+PFParams = PSENconfig.Dico['PF_PARAMETERS']
+
+import powerfactory
+
+app = powerfactory.GetApplication()
+# app.Show()
+user = app.GetCurrentUser()
+ComImp = user.CreateObject('ComPFDIMPORT')
+
+app.SetWriteCacheEnabled(1) # Disable consistency check
+ComImp.g_file = PSENconfig.Dico['DIRECTORY']['pfd_file']
+ComImp.g_target = user # project is imported under the user account
+err = ComImp.Execute() # Execute command starts the import process
+app.SetWriteCacheEnabled(0) # Enable consistency check
+if err:
+ app.PrintError('Project could not be imported...')
+ exit()
+prjs = user.GetContents('*.IntPrj')
+prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
+prj = prjs[0]
+app.ActivateProject(prj.loc_name)
+prj = app.GetActiveProject()
+studycase = app.GetActiveStudyCase()
+studycase.loc_name = 'BaseCase'
+app.PrintPlain('Project %s has been successfully imported.' % prj)
+ComImp.Delete()
+# stop = time.clock(); print('Imptor file first time ' + str(round(stop - start, 3)) + ' seconds'); start = stop;#++++++++++++++++
+#read sav
+all_inputs_init=read_pfd(app,prj.loc_name,recal=1)
+
+# all_inputs_base=read_pfd(Paths['pfd_file'])
+all_inputs_base=all_inputs_init
+buses_base = [row[0:8] for row in all_inputs_base[0]]
+lines_base = [row[0:11] for row in all_inputs_base[1]]
+trans_base = [row[0:11] for row in all_inputs_base[2]]
+plants_base = [row[0:11] for row in all_inputs_base[3]]
+loads_base = [row[0:6] for row in all_inputs_base[4]]
+shunt_base = [row[0:6] for row in all_inputs_base[5]]
+motors_base = [row[0:6] for row in all_inputs_base[6]]
+trans3_base = [row[0:14] for row in all_inputs_base[7]]
+swshunt_base = [row[0:6] for row in all_inputs_base[8]]
+########///////////////////////////////////////////////////////////##########
+filer=open('temp1.txt','r')
+_path=[]
+for line in filer:
+ _path.append(line)
+filer.close()
+path_save = _path[0].replace('\n','')
+ldf = app.GetFromStudyCase('ComLdf')
+ldf.iopt_net = 0  # AC load flow
+ldf.iopt_at = 1  # automatic transformer taps
+ldf.iopt_asht = 1  # automatic shunts
+ldf.iopt_lim = 1  # reactive power limits
+ldf.iopt_plim = 1  # active power limits
+ldf.iopt_limScale = 1  # scale factor
+ldf.iopt_noinit = 1  # no load flow initialization
+ldf.iopt_initOPF = 0  # used for OPF initialization
+ldf.iShowOutLoopMsg = 0  # suppress outer-loop messages
+ldf.iopt_show = 0  # suppress output
+ldf.iopt_check = 0  # suppress output
+ldf.iopt_chctr = 0  # suppress output
+
+####OPF Parametrisation
+opf = app.GetFromStudyCase('ComOpf')
+opf.iopt_ACDC = 0  # AC OPF without contingencies
+
+#OPF Controls
+#opf.iopt_pd = 1  # active power dispatch 1: YES
+#opf.iopt_qd = 1  # generator reactive power dispatch control 1: YES
+TapChange = 1 - int(PFParams['LOCK_TAPS'])  # 0 if locked, 1 if stepping
+opf.iopt_trf = TapChange  # transformer tap position 1: YES
+#opf.iopt_sht = 1  # switchable shunts 0: NO
+
+#OPF Constraints
+#opf.iopt_brnch = 1  # branch flow limit constraint 1: YES
+#opf.iopt_genP = 1  # generator active power limit constraint 1: YES
+#opf.iopt_genQ = 1  # generator reactive power limit constraint 1: YES
+#opf.iop_bus = 1  # busbar voltage constraints 0: NO
+#opf.iopt_add = 0  # border flow limit constraint 0: NO
+
+opf.iInit = 0  # OPF initialization
+opf.iitr = int(PFParams['ITERATION_INTERIOR'])  # number of inner-loop iterations
+opf.iitr_outer = 30  # number of outer-loop iterations
+if PFParams['ALGORITHM'] == 'Optimum Power Flow':
+    if PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST':
+        opf.iopt_obj = 'cst'  # objective function = cost minimization
+    elif PFParams['OBJECTIVE_FUNCTION'] == 'LOADSHEDDING_COSTS':
+        opf.iopt_obj = 'shd'  # objective function = load shedding cost minimization
+    elif PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_LOSSES':
+        opf.iopt_obj = 'los'  # objective function = minimization of total network losses
+    elif PFParams['OBJECTIVE_FUNCTION'] == 'MAXIMISATION_MVAR_RESERVE':
+        opf.iopt_obj = 'rpr'  # objective function = maximization of the reactive power reserve
+
+    # create trigger
+    # preparation: delete the old characteristics
+    fOplib = app.GetProjectFolder('oplib')  # folder containing the characteristics folder
+    fChar = app.GetProjectFolder('chars')  # folder containing the triggers
+    if fChar is None:
+        fChar = fOplib.GetChildren(1, 'Characteristics.IntPrjfolder', 1)
+        if fChar == []:
+            fChar = fOplib.CreateObject('IntPrjfolder', 'Characteristics')
+            fChar.iopt_typ = 'chars'
+        else:
+            fChar = fChar[0]
+            fChar.iopt_typ = 'chars'
+    fScale = fChar.GetChildren(1, '*.IntScales')
+    if fScale == []:
+        fScale = fChar.CreateObject('IntScales')
+    else:
+        fScale = fScale[0]
+    trifiles = fScale.GetChildren(1, '*.TriFile', 1)
+    for trifile in trifiles:
+        trifile.Delete()
+    chavecs = fChar.GetChildren(1, '*.ChaVecFile', 1)
+    for chavec in chavecs:
+        chavec.Delete()
+    fCase = app.GetActiveStudyCase()
+    settriggers = fCase.GetChildren(1, '*.SetTrigger', 1)
+    for settriger in settriggers:
+        settriger.Delete()
+
+app.SaveAsScenario('Base', 1)  # create a scenario to save the base case
+scenario_temporaire = app.GetActiveScenario()
+scenario_temporaire.Save()
+scenario_temporaire.Deactivate()
+
+ComExp = user.CreateObject('ComPfdExport')
+app.SetWriteCacheEnabled(1) # Disable consistency check
+ComExp.g_objects = [prj] # define the project to be exported
+ComExp.g_file = os.path.join(path_save, "BaseCase.pfd")
+err = ComExp.Execute() # Command starts the export process
+if err:
+ app.PrintError('Project could not be exported...')
+ exit()
+app.SetWriteCacheEnabled(0) # Enable consistency check
+# app.PrintPlain('Project %s has been successfully exported to BaseCase.' % prj)
+print(prj)
+print(prj.loc_name)
+ComExp.Delete()
+prj.Delete()
+
+# buses_base,lines_base,trans_base,plants_base,loads_base,shunt_base,motors_base,trans3_base,swshunt_base
+# save the result in a pickle file
+res_final=[buses_base,lines_base,trans_base,plants_base,loads_base,shunt_base,motors_base,trans3_base,swshunt_base]
+with open('param_base', 'wb') as fichier:
+ mon_pickler = pickle.Pickler(fichier, protocol=2)
+ mon_pickler.dump(res_final)
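+# To reload these base-case tables elsewhere (sketch):
+#   with open('param_base', 'rb') as f:
+#       (buses_base, lines_base, trans_base, plants_base, loads_base,
+#        shunt_base, motors_base, trans3_base, swshunt_base) = pickle.load(f)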
+# aa=1
\ No newline at end of file
--- /dev/null
+##########################################
+# purpose of this module: launch the parallel computation
+##########################################
+
+import time
+import PSENconfig # file with Eficas output dictionaries
+import os,sys,pickle
+# from support_functionsPF import *#Valentin
+from support_functionsPF import read_pfd,np
+from math import *
+import csv
+
+stop = time.clock(); start = stop;
+PFParams = PSENconfig.Dico['PF_PARAMETERS']
+with open(os.path.dirname(os.path.realpath(__file__))+'/data_dico', 'rb') as fichier:
+ mon_depickler = pickle.Unpickler(fichier)
+ dico = mon_depickler.load()
+x = dico['inputSamp']
+x_copy = [list(row) for row in x]  # working copy of the samples; transfer functions are applied in place
+
+position = dico['position']
+# timeVect = dico['timeVect']
+LawsList = dico['CorrMatrix']['laws']
+N_1_LINES = dico['N_1_LINES']
+N_1_TRANSFORMERS = dico['N_1_TRANSFORMERS']
+N_1_MOTORS = dico['N_1_MOTORS']
+N_1_LOADS = dico['N_1_LOADS']
+N_1_GENERATORS = dico['N_1_GENERATORS']
+# inputSample = []
+# x_copy = []
+# #############################################################################/
+import powerfactory
+app = powerfactory.GetApplication()
+stop = time.clock(); print(' A0 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
+user = app.GetCurrentUser()
+if dico['position'] == 0:
+ ComImp = user.CreateObject('ComPFDIMPORT')
+ app.SetWriteCacheEnabled(1) # Disable consistency check
+ ComImp.g_file = os.path.join(dico['doc_base'], 'BaseCase.pfd')
+ ComImp.g_target = user # project is imported under the user account
+ err = ComImp.Execute() # Execute command starts the import process
+ ComImp.Delete()
+ app.SetWriteCacheEnabled(0) # Enable consistency check
+prjs = user.GetContents('*.IntPrj')
+prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
+prj = prjs[0]
+prj.Activate()
+
+#############################################################################/
+fOplib = app.GetProjectFolder('oplib')  # folder containing the characteristics folder
+fChar = app.GetProjectFolder('chars')  # folder containing the triggers
+fScale = fChar.GetChildren(1, '*.IntScales')[0]
+fScen = app.GetProjectFolder('scen')  # folder containing the scenarios
+studycase0 = prj.GetContents('BaseCase.IntCase', 1)[0] # app.GetActiveStudyCase()
+studycase0.Activate()
+scen = fScen.GetChildren(1, 'Base.IntScenario', 1)[0]
+scen.Activate()
+settrigger0 = studycase0.GetChildren(1, 'set_iteration.SetTrigger', 1)
+if settrigger0:
+ settrigger0[0].outserv=1
+fold = studycase0.fold_id
+all_inputs_init = read_pfd(app, prj.loc_name, recal=1)
+scen.Deactivate()
+stop = time.clock(); print(' A1 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
+# print('read_pfd before loop ' + str(round(stop - start, 3)) + ' seconds');
+# start = stop; # ++++++++++++++++
+loads_base = all_inputs_init[4]
+plants_base = all_inputs_init[3]
+lines_base = all_inputs_init[1]
+transf_base = all_inputs_init[2]
+transf3_base = all_inputs_init[7]
+motors_base = all_inputs_init[6]
+
+## write the initial pgini values (before the trigger) to a csv file
+#initial_pginis = []
+#for plant in plants_base:
+# initial_pginis.append(plant[11].pgini)
+#
+#csvfile = os.path.join(dico['doc_base'], 'initial_pgini.csv')
+#g = open(csvfile,"wb")
+#cwt = csv.writer(g, delimiter=";")
+#for ipgini in initial_pginis:
+# cwt.writerow(ipgini)
+#g.close()
+
+
+
+trifiles = fScale.GetChildren(1, '*.TriFile', 1)
+stop = time.clock(); print(' A2 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
+# create the trifile only once, on the first package
+if dico['position'] == 0:
+ for trifile in trifiles:
+ trifile.Delete()
+ chavecs = fChar.GetChildren(1, '*.ChaVecFile', 1)
+ for chavec in chavecs:
+ chavec.Delete()
+ settriggers = studycase0.GetChildren(1, '*.SetTrigger', 1)
+ for settriger in settriggers:
+ settriger.Delete()
+ tri1 = fScale.CreateObject('TriFile')
+ tri1.loc_name = 'set_iteration'
+ tri1.iopt_time = 1
+ tri1.unit = '1'
+ settriger = studycase0.CreateObject('SetTrigger', 'set_iteration')
+ # settriger= studycase0.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
+ settriger.ptrigger = tri1
+ effacers = studycase0.GetContents('*.ComPython', 0)
+ for effacer in effacers:
+ effacer.Delete()
+ compython0 = studycase0.CreateObject('ComPython', 'comp0')
+ compython0.filePath = os.path.dirname(os.path.realpath(__file__)) + '/comfile.py'
+ effacers = fold.GetContents('*.Comtasks', 0)
+ for effacer in effacers:
+ effacer.Delete()
+ comtask = fold.CreateObject('ComTasks')
+else:
+ stop = time.clock();
+ print(' A3 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');
+ start = stop;
+ tri1 = fScale.GetChildren(1, 'set_iteration.TriFile', 1)[0]
+ stop = time.clock();
+ print(' A4 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');
+ start = stop;
+ settriger = studycase0.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
+ stop = time.clock();
+ print(' A5 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');
+ start = stop;
+ comtask = fold.GetContents('*.Comtasks', 0)[0]
+ comtask.Delete()
+ stop = time.clock(); print(' A6 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+ comtask = fold.CreateObject('ComTasks')
+ # comtask.RemoveStudyCases()
+stop = time.clock(); print(' A7 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
+lenlaw = len(x[0]) - 1  # number of laws
+nameN1 = []  # names of the N-1 elements
+for N1 in N_1_LINES:
+ nameN1.append(N1)
+for N1 in N_1_TRANSFORMERS:
+ nameN1.append(N1)
+for N1 in N_1_MOTORS:
+ nameN1.append(N1)
+for N1 in N_1_LOADS:
+ nameN1.append(N1)
+for N1 in N_1_GENERATORS:
+ nameN1.append(N1)
+
+charefs = prj.GetChildren(1, '*.ChaRef', 1)
+for charef in charefs:
+ charef.Delete()
+stop = time.clock(); print(' Prepare chavecfile and characteristic in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
+# Begin: create the chavecfile and the characteristics
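+# For each activated law, a ChaVecFile characteristic is created that points at column
+# (i + 2) of data_trigger.csv and is tied to the 'set_iteration' trigger (tri1): each
+# study case sets the trigger to its iteration number so that the corresponding row of
+# the csv is applied to the referenced attribute (plini, pgini, outserv, ...).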
+for i, law in enumerate(LawsList):
+ if law != 'N_1_fromFile':
+        if dico['Laws'][law]['ComponentType'] == 'Generator' and 'Level' in dico['Laws'][law]['Type']:  # power level
+            if dico['Laws'][law]['TransferFunction'] == True:
+                if dico['Laws'][law]['TF_Input'] == '.pow file':
+                    z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
+                    pathWT = dico['Laws'][law]['File_Name']
+                    HH = dico['Laws'][law]['Hub_Height']
+                    alpha = dico['Laws'][law]['AlphaWS']
+                    PercentLoss = dico['Laws'][law]['Percent_Losses']
+                    for ite in range(len(x)):  # apply the wind speed -> power transfer function to every sample
+                        x_copy[ite][i] = eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
+                elif dico['Laws'][law]['TF_Input'] == 'tuples list':
+                    for ite in range(len(x)):
+                        x_copy[ite][i] = applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
+                # else:  # ensure values are between 0 and 1
+                #     Pval = x[ite][i]
+                #     Pval = min(Pval, 1)
+                #     Pval = max(Pval, 0)
+                #     x_copy[ite][i] = Pval
+ ###################=======================================
+        if dico['Laws'][law]['ComponentType'] == 'Load' and ('Unavailability' not in dico['Laws'][law]['Type']):
+            LoadList = dico['Laws'][law]['Load']
+            for LoadName in LoadList:  # several loads possible
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ for load in loads_base:
+                    if (load[0] == busNum) and (load[5] == ID):  # create trigger
+                        chavec_1 = fChar.CreateObject('ChaVecFile', 'Load_' + LoadName)
+                        chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 1
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ load[6].plini = load[6].plini
+ ref = load[6].CreateObject('charef', 'plini')
+ ref.typ_id = chavec_1
+# refP = load[6].GetChildren(1, 'plini.Charef',1)
+# refP[0].outserv = 0
+ ref = load[6].CreateObject('charef', 'qlini')
+ ref.typ_id = chavec_1
+# refQ = load[6].GetChildren(1, 'qlini.Charef',1)
+# refQ[0].outserv = 0
+ break
+
+
+ # Motor Load Law: change the values of the different induction motor loads and treat large changes of load to help convergence
+ # if dico['Laws'][law]['ComponentType']=='Motor' and ('N_1' not in law) and ('out' not in law.lower()):
+        if dico['Laws'][law]['ComponentType'] == 'Motor' and ('Unavailability' not in dico['Laws'][law]['Type']):
+            MotorList = dico['Laws'][law]['Motor']
+            # if x_copy[ite][i] > 0.75:  # change the load(s) directly
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Pmax = dico['Motors'][MotorName]['PMAX']
+ for motor in motors_base:
+                    if (motor[0] == busNum) and (motor[5] == ID):  # create trigger
+                        chavec_1 = fChar.CreateObject('ChaVecFile', 'Mo_' + MotorName)
+                        chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 1
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ motor[6].pgini = Pmax
+ ref = motor[6].CreateObject('charef', 'pgini')
+ ref.typ_id = chavec_1
+ break
+
+ # Generator Law : Change generation level
+ # if dico['Laws'][law]['ComponentType']=='Generator' and ('N_1' not in law) and ('out' not in law.lower()):
+ if dico['Laws'][law]['ComponentType'] == 'Generator' and ('Unavailability' not in dico['Laws'][law]['Type']):
+ GenList = dico['Laws'][law]['Generator']
+ for GenName in GenList:
+ busNum = dico['Generators'][GenName]['NUMBER']
+ ID = dico['Generators'][GenName]['ID']
+ Pmax = dico['Generators'][GenName]['PMAX']
+ # Pmin = dico['Generators'][GenName]['PMIN']
+ for plant in plants_base:
+                    if (plant[0] == busNum) and (plant[2] == ID):  # create trigger
+                        chavec_1 = fChar.CreateObject('ChaVecFile', 'Gen_' + GenName)
+                        chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 1
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ plant[11].pgini = Pmax
+# ref = plant[11].CreateObject('charef', 'pgini')
+ ref = plant[11].CreateObject('charef', 'pgini') # CM
+ ref.typ_id = chavec_1
+ ref = plant[11].CreateObject('charef', 'qgini')
+ ref.typ_id = chavec_1
+ break
+
+ # Line or Transformer Unavailability Law: disconnect component if sample=0
+ elif dico['Laws'][law]['ComponentType'] == 'Line' or dico['Laws'][law][
+ 'ComponentType'] == 'Transformer':
+ compType = dico['Laws'][law]['ComponentType']
+ CompList = dico['Laws'][law][compType]
+ for Name in CompList:
+ from_bus = dico[compType + 's'][Name]['FROMNUMBER']
+ to_bus = dico[compType + 's'][Name]['TONUMBER']
+ ID = dico[compType + 's'][Name]['ID']
+                if compType == 'Line':  # disconnect the line
+ for line in lines_base:
+                        if (from_bus == line[0]) and (to_bus == line[1]) and (line[10] == ID):  # create trigger
+                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Line_' + Name)
+                            chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ line[11].outserv = line[11].outserv
+ ref = line[11].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+                elif compType == 'Transformer':  # disconnect a 2-winding transformer
+ if dico[compType + 's'][Name]['#WIND'] == 2:
+ for tranf in transf_base:
+ if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (tranf[10] == ID):
+ chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf_' + Name)
+                                chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ tranf[11].outserv = tranf[11].outserv
+ ref = tranf[11].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+                    elif dico[compType + 's'][Name]['#WIND'] == 3:  # disconnect a 3-winding transformer
+ three_bus = dico[compType + 's'][Name]['3NUMBER']
+ for tranf in transf3_base:
+ if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (three_bus == tranf[2]) and (
+ tranf[13] == ID):
+ chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf3_' + Name)
+                                chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ tranf[14].outserv = tranf[14].outserv
+ ref = tranf[14].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+ # x2.append(x_copy[ite][i]) # store values sampled for logger function
+
+ elif (dico['Laws'][law]['ComponentType'] == 'Generator' and (
+ 'Unavailability' in dico['Laws'][law]['Type'])) or \
+ (dico['Laws'][law]['ComponentType'] == 'Load' and (
+ 'Unavailability' in dico['Laws'][law]['Type'])) or \
+ (dico['Laws'][law]['ComponentType'] == 'Motor' and (
+ 'Unavailability' in dico['Laws'][law]['Type'])):
+ compType = dico['Laws'][law]['ComponentType']
+ CompList = dico['Laws'][law][compType]
+
+ for Name in CompList:
+ busNum = dico[compType + 's'][Name]['NUMBER']
+ ID = dico[compType + 's'][Name]['ID']
+ if compType == 'Generator':
+ for plant in plants_base:
+                        if (plant[0] == busNum) and (plant[2] == ID):  # create trigger
+                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Gen_' + Name)
+                            chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ plant[11].outserv = plant[11].outserv
+ ref = plant[11].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+ elif compType == 'Load':
+ for load in loads_base:
+                        if (load[0] == busNum) and (load[5] == ID):  # create trigger
+                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Load_' + Name)
+                            chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ load[6].outserv = load[6].outserv
+ ref = load[6].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+ elif compType == 'Motor':
+ for motor in motors_base:
+                        if (motor[0] == busNum) and (motor[5] == ID):  # create trigger
+                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Mo_' + Name)
+                            chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ chavec_1.datacol = i + 2
+ chavec_1.scale = tri1
+ motor[6].outserv = motor[6].outserv
+ ref = motor[6].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+    else:  # law == 'N_1_fromFile'
+ for line_name in N_1_LINES:
+ from_bus = dico['Lines'][line_name]['FROMNUMBER']
+ to_bus = dico['Lines'][line_name]['TONUMBER']
+ ID = dico['Lines'][line_name]['ID']
+ for line in lines_base:
+                if (from_bus == line[0]) and (to_bus == line[1]) and (line[10] == ID):  # create trigger
+                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Line_' + line_name)
+                    chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ for i, name in enumerate(nameN1):
+ if line_name == name:
+ chavec_1.datacol = lenlaw + i + 2
+ break
+ chavec_1.scale = tri1
+ outs = line[11].GetChildren(1, 'outserv.Charef', 1)
+ for out in outs:
+                        out.Delete()  # if a trifile already exists, delete it; N_1_fromFile has priority
+ line[11].outserv = line[11].outserv
+ ref = line[11].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+ for transfo_name in N_1_TRANSFORMERS:
+ from_bus = dico['Transformers'][transfo_name]['FROMNUMBER']
+ to_bus = dico['Transformers'][transfo_name]['TONUMBER']
+ ID = dico['Transformers'][transfo_name]['ID']
+ if dico['Transformers'][transfo_name]['#WIND'] == 2:
+ for tranf in transf_base:
+ if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (tranf[10] == ID):
+ chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf_' + transfo_name)
+                        chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ for i, name in enumerate(nameN1):
+ if transfo_name == name:
+ chavec_1.datacol = lenlaw + i + 2
+ break
+ chavec_1.scale = tri1
+ outs = tranf[11].GetChildren(1, 'outserv.Charef', 1)
+ for out in outs:
+                            out.Delete()  # if a trifile already exists, delete it; N_1_fromFile has priority
+ tranf[11].outserv = tranf[11].outserv
+ ref = tranf[11].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+        elif dico['Transformers'][transfo_name]['#WIND'] == 3:  # disconnect a 3-winding transformer
+ three_bus = dico['Transformers'][transfo_name]['3NUMBER']
+ for tranf in transf3_base:
+ if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (three_bus == tranf[2]) and (
+ tranf[13] == ID):
+ chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf_' + transfo_name)
+                        chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ for i, name in enumerate(nameN1):
+ if transfo_name == name:
+ chavec_1.datacol = lenlaw + i + 2
+ break
+ chavec_1.scale = tri1
+ outs = tranf[14].GetChildren(1, 'outserv.Charef', 1)
+ for out in outs:
+ out.Delete()
+ tranf[14].outserv = tranf[14].outserv
+ ref = tranf[14].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+ for motor_name in N_1_MOTORS:
+ busNum = dico['Motors'][motor_name]['NUMBER']
+ ID = dico['Motors'][motor_name]['ID']
+
+ for motor in motors_base:
+                if (motor[0] == busNum) and (motor[5] == ID):  # create trigger
+                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Mo_' + motor_name)
+                    chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ for i, name in enumerate(nameN1):
+ if motor_name == name:
+ chavec_1.datacol = lenlaw + i + 2
+ break
+ chavec_1.scale = tri1
+ outs = motor[6].GetChildren(1, 'outserv.Charef', 1)
+ for out in outs:
+                        out.Delete()  # if a trifile already exists, delete it; N_1_fromFile has priority
+ motor[6].outserv = motor[6].outserv
+ ref = motor[6].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+ for load_name in N_1_LOADS:
+ busNum = dico['Loads'][load_name]['NUMBER']
+ ID = dico['Loads'][load_name]['ID']
+ for load in loads_base:
+                if (load[0] == busNum) and (load[5] == ID):  # create trigger
+                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Load_' + load_name)
+                    chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ for i, name in enumerate(nameN1):
+ if load_name == name:
+ chavec_1.datacol = lenlaw + i + 2
+ break
+ chavec_1.scale = tri1
+ outs = load[6].GetChildren(1, 'outserv.Charef', 1)
+ for out in outs:
+ out.Delete()
+ load[6].outserv = load[6].outserv
+ ref = load[6].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+ for group_name in N_1_GENERATORS:
+ busNum = dico['Generators'][group_name]['NUMBER']
+ ID = dico['Generators'][group_name]['ID']
+ for plant in plants_base:
+                if (plant[0] == busNum) and (plant[2] == ID):  # create trigger
+                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Gen_' + group_name)
+                    chavec_1.f_name = os.path.join(os.getcwd(), 'data_trigger.csv')  # .csv file of the characteristic
+ chavec_1.usage = 2
+ for i,name in enumerate(nameN1):
+ if group_name == name:
+ chavec_1.datacol = lenlaw + i + 2
+ break
+ chavec_1.scale = tri1
+ outs = plant[11].GetChildren(1, 'outserv.Charef', 1)
+ for out in outs:
+ out.Delete()
+ plant[11].outserv = plant[11].outserv
+ ref = plant[11].CreateObject('charef', 'outserv')
+ ref.typ_id = chavec_1
+ break
+
+
+ # chemin=os.getcwd()
+stop = time.clock(); print(' Prepare chavec for N_1_fromfile in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+print('======= BEGIN copy studycases'+' ==================')
+if settrigger0:
+ settrigger0[0].outserv=0
+for ite in range(len(x)):
+ # inputSample.append(np.array(x[ite]))
+ studycase = fold.AddCopy(studycase0, 'Case_'+str(position))
+ settriger_iter = studycase.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
+ settriger_iter.ftrigger = position
+ compy = studycase.GetContents('*.ComPython', 0)[0]
+ comtask.AppendStudyCase(studycase)
+ comtask.AppendCommand(compy)
+ position+=1
+if settrigger0:
+ settrigger0[0].outserv=1
+stop = time.clock();print(' Copy study case in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');start = stop;
+err=comtask.Execute()
+
+# app.Show()
+aa=1
\ No newline at end of file
--- /dev/null
+from support_functions import *
+
+##import os,sys,random,string
+##import PSENconfig
+##sys.path.append(PSENconfig.Dico['DIRECTORY']['PSSPY_path'])
+##os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PSSE_path'] + ";"+ os.environ['PATH']
+##import psspy
+
+ropfile = r'D:\DEWA Solar 2017\2018 DEWA peak_fullGCCIA.rop'
+savfile = r'D:\DEWA Solar 2017\2018 DEWA peak_fullGCCIA.sav'
+savfile2 = r'D:\DEWA Solar 2017\2018 DEWA peak_fullGCCIA2.sav'
+GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(ropfile)
+
+
+_i=psspy.getdefaultint()
+_f=psspy.getdefaultreal()
+_s=psspy.getdefaultchar()
+redirect.psse2py()
+#import pssdb
+psspy.psseinit(80000)
+
+# Silent execution of PSSe
+islct=6 # 6=no output; 1=standard
+psspy.progress_output(islct)
+
+psspy.case(savfile)
+
+NoDisconnectionAllowedTotal = []
+for res in PeriodReserveData:
+ ResNum = res[0]
+ ResLevel = res[1]
+ ResPeriod = res[2]
+ InService = res[3]
+ if InService == 0:
+ continue
+ ParticipatingUnits = res[4]
+ ParticipatingUnitsFull = []
+ NoDisconnectionAllowed = []
+ for unit in ParticipatingUnits:
+ busNum = unit[0]
+ ID = unit[1]
+
+ for gen in GenReserveData:
+ busNum2 = gen[0]
+ ID2 = gen[1]
+ if busNum==busNum2 and ID == ID2:
+ ramp =gen[2]
+ Pmax = gen[3]
+ break
+
+ for gen in GenDispatchData:
+ busNum3 = gen[0]
+ ID3 = gen[1]
+ if busNum==busNum3 and ID == ID3:
+ dispatch = gen[2]
+ dispTable = gen[3]
+ break
+
+ for dTable in DispTableData:
+ dispTable2 = dTable[0]
+ if dispTable == dispTable2:
+ PmaxTable = dTable[1]
+ PminTable = dTable[2]
+ FuelCostScaleCoef = dTable[3]
+ CurveType = dTable[4] #2 = piece wise linear,
+ Status = dTable[5]
+ CostTable = dTable[6]
+ break
+
+ for table in LinCostTables:
+ CostTable2 = table[0]
+ if CostTable2==CostTable:
+ numpoints = table[1]
+ points = table[2]
+ break
+
+ MaxContribution = min(ResPeriod * ramp, Pmax)
+
+ for i,[x,y] in enumerate(points):
+ if x > Pmax:
+ x1 = x
+ y1 = y
+ x0 = points[i-1][0]
+ y0 = points[i-1][1]
+ break
+ y_i = (y1 - y0)*Pmax/(x1-x0)
+
+ CostCoef = y_i / Pmax
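+        # CostCoef reduces to (y1 - y0) / (x1 - x0): the marginal cost slope of the
+        # piece-wise linear segment containing Pmax, used below to rank the units.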
+
+ ParticipatingUnitsFull.append([busNum, ID, Pmax, dispTable, MaxContribution, CostCoef])
+
+ ParticipatingUnitsFull.sort(key=lambda x: x[-1], reverse=False)
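+    # Greedy reserve check (logic of the loop below): units sorted by ascending marginal
+    # cost are accumulated until their combined MaxContribution covers the reserve level;
+    # each retained unit gets Pmin = 12% of Pmax in its dispatch table so that the OPF
+    # cannot disconnect it (cf. the ReserveCorrection note in support_functions).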
+ ReserveCapability = 0
+
+ for unit in ParticipatingUnitsFull:
+ MaxContribution = unit[4]
+ if ReserveCapability >= ResLevel:
+ break
+ else:
+ ReserveCapability += MaxContribution
+ dispTable = unit[3]
+ Pmax = unit[2]
+ busNum = unit[0]
+ ID = unit[1]
+ NoDisconnectionAllowed.append([busNum, ID])
+ Pmin = 0.12*Pmax
+ psspy.opf_apdsp_tbl(dispTable,[_i,_i,_i],[_f, Pmin,_f])
+
+ for grp in NoDisconnectionAllowed:
+ if grp not in NoDisconnectionAllowedTotal:
+ NoDisconnectionAllowedTotal.append(grp)
+
+ psspy.save(savfile2)
+
+
+
--- /dev/null
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Jun 03 15:31:42 2013
+
+@author: B31272
+
+Support functions
+"""
+import os,sys,random,string
+import PSENconfig
+sys.path.append(PSENconfig.Dico['DIRECTORY']['PSSPY_path'])
+os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PSSE_path'] + ";"+ os.environ['PATH']
+import psspy
+import pssarrays
+import redirect
+
+
+import pdb
+import numpy as np
+from math import *
+from decimal import *
+from openturns import *
+from time import sleep, strftime, gmtime
+import multiprocessing
+from threading import Thread
+from Queue import Queue, Empty
+
+Debug = False #debug mode for PSSEFunct
+Disconnect_RES = False #disconnect renewable generators when generate 0 MW
+DEWA_PV_Qlimits = True #lower Q limits when P of renewable generators is < 0.2 Pmax
+ReserveCorrection = True #add Pmin to units that are necessary to satisfy reserve requirements so that they are not disconnected after unit commitment
+DisconnectThreshhold = 0.10 #threshold, in per unit of active power, below which generators are disconnected so that they participate neither in reserve nor in reactive compensation
+#===============================================================================
+# DEFINITION DES FONCTIONS - CREATION OF THE FUNCTIONS
+#===============================================================================
+
+
+#read a ROP file containing all opf data
+def readOPFdata(RopFile):
+
+ fo = open(RopFile, 'r')
+ Lines = fo.readlines()
+ fo.close()
+
+ for i,line in enumerate(Lines):
+ if 'begin Generator Dispatch data' in line:
+ startgendisp = i+1
+ if 'End of Generator Dispatch data' in line:
+ endgendisp = i
+ if 'begin Active Power Dispatch Tables' in line:
+ startdisptable = i+1
+ if 'End of Active Power Dispatch Tables' in line:
+ enddisptable = i
+ if 'begin Piece-wise Linear Cost Tables' in line:
+ startlincosttable = i+1
+ if 'End of Piece-wise Linear Cost Tables' in line:
+ endlincosttable = i
+ if 'begin Piece-wise Quadratic Cost Tables' in line:
+ startquadcosttable = i+1
+ if 'End of Piece-wise Quadratic Cost Tables' in line:
+ endquadcosttable = i
+ if 'begin Polynomial Cost Tables' in line:
+ startpolycosttable = i+1
+ if 'End of Polynomial Cost Tables' in line:
+ endpolycosttable = i
+ if 'begin Generation Reserve data' in line:
+ startgenreservedata = i+1
+ if 'End of Generation Reserve data' in line:
+ endgenreservedata = i
+ if 'begin Period Reserve data' in line:
+ startperiodreservedata = i+1
+ if 'end of period reserve' in line.lower():
+ endperiodreservedata = i
+ if 'begin Adjustable Bus Shunt data' in line:
+ startadjbusshuntdata = i+1
+ if 'End of Adjustable Bus Shunt data' in line:
+ endadjbusshuntdata = i
+ if 'begin Adjustable Bus Load Tables' in line:
+ startadjloadtable = i+1
+ if 'End of Adjustable Bus Load Tables' in line:
+ endadjloadtable = i
+
+
+ GenDispatchData = []
+ for i in range(startgendisp,endgendisp):
+ data = Lines[i].split()
+ busNum = int(data[0])
+ ID = data[1]
+ dispatch = float(data[2])
+ dispTable = int(data[3])
+ GenDispatchData.append([busNum,ID,dispatch, dispTable])
+
+ DispTableData = []
+ for i in range(startdisptable,enddisptable):
+ data = Lines[i].split()
+ DispTable = int(data[0])
+ Pmax = float(data[1])
+ Pmin = float(data[2])
+ FuelCostScaleCoef = float(data[3])
+ CurveType = int(data[4]) #2 = piece wise linear,
+ Status = int(data[5])
+ CostTable = int(data[6])
+ DispTableData.append([DispTable,Pmax,Pmin,FuelCostScaleCoef,CurveType,Status,CostTable])
+
+ LinCostTables = []
+ i = startlincosttable
+ while i >= startlincosttable and i < endlincosttable:
+ headerdata = Lines[i].split()
+ CostTable = int(headerdata[0])
+ #tableName = str(headerdata[1])
+ numpoints = int(headerdata[-1])
+ points=[]
+ i+=1
+ for k in range(numpoints):
+ #pdb.set_trace()
+ pointdata = Lines[i+k].split()
+ x =float(pointdata[0])
+ y =float(pointdata[1])
+ points.append([x,y])
+ i+=numpoints
+ LinCostTables.append([CostTable, numpoints, points])
+
+ QuadCostTables = []
+ PolyCostTables = []
+
+ GenReserveData = []
+ for i in range(startgenreservedata,endgenreservedata):
+ data = Lines[i].split()
+ busNum = int(data[0])
+ ID = data[1]
+ ramp =float(data[2])
+ Pmax = float(data[3])
+ GenReserveData.append([busNum, ID, ramp, Pmax])
+
+ PeriodReserveData = []
+ for i in range(startperiodreservedata,endperiodreservedata):
+ data = Lines[i].split()
+ if len(data)==4:
+ ResNum = int(data[0])
+ ResLevel = float(data[1])
+ ResPeriod = float(data[2])
+ InService = int(data[3])
+ ParticipatingUnits = []
+ elif len(data)==2:
+ busNum = int(data[0])
+ ID = data[1]
+ ParticipatingUnits.append([busNum,ID])
+ elif 'End of Participating Reserve Units' in Lines[i]:
+ PeriodReserveData.append([ResNum,ResLevel,ResPeriod,InService,ParticipatingUnits])
+ else:
+ pass
+
+ AdjBusShuntData = []
+ for i in range(startadjbusshuntdata,endadjbusshuntdata):
+ data = Lines[i].split()
+ busNum = int(data[0])
+ ID = data[1]
+ SuscInit = float(data[2])
+ SuscMax = float(data[3])
+ SuscMin = float(data[4])
+ CostScale = float(data[5])
+ InService = int(data[6])
+ AdjBusShuntData.append([busNum,ID, SuscInit,SuscMax,SuscMin,CostScale,InService])
+
+ AdjLoadTables = []
+ for i in range(startadjloadtable,endadjloadtable):
+ data = Lines[i].split()
+ tableNum = int(data[0])
+ LoadMult = float(data[1])
+ Max = float(data[2])
+ Min = float(data[3])
+ CostScale = float(data[7])
+ InService = int(data[9])
+ AdjLoadTables.append([tableNum,LoadMult,Max,Min,CostScale,InService])
+
+ return GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData, AdjBusShuntData, AdjLoadTables
+
+
+#to remove a list from a string: "['wind 1', 'wind 2', 'charge']" --> ['wind1', 'wind2', 'charge'] (note: all spaces are stripped)
+def RemoveListfromString(List):
+ List = List.replace("]","")
+ List = List.replace("[","")
+ List = List.replace(")","")
+ List = List.replace("(","")
+ List = List.replace("'","")
+ List = List.replace('"',"")
+ List = List.replace(" ","")
+ List = List.split(",")
+ return List
+
+def RemoveTuplesfromString(TList):
+ TL = RemoveListfromString(TList)
+ nTL = []
+    for i in range(len(TL)//2):
+ nTL.append([TL[2*i],float(TL[2*i+1])])
+ return nTL
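+# Example (note that all spaces are stripped):
+#   RemoveListfromString("['wind 1', 'wind 2']")  ->  ['wind1', 'wind2']
+#   RemoveTuplesfromString("[('wind', 0.5)]")     ->  [['wind', 0.5]]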
+
+###Wind speed-to-power transfer function of a wind turbine
+##def eol_old(wind, WTconfig):
+## Vcin = WTconfig ['cutinWS']
+## Vrate = WTconfig ['ratedWS']
+## Vcout = WTconfig ['cutoutWS']
+## Rho = WTconfig ['rho']
+## lossrate = WTconfig ['lossrate']
+## if wind <= Vcin :
+## Pnorm=0
+## elif wind < Vrate :
+## Pnorm=wind*(1-lossrate)#((wind**2-Vcin**2)/(Vrate**2-Vcin**2)*Rho/1.225*(1-lossrate))
+## elif wind < Vcout :
+## Pnorm = 1*(1-lossrate)
+## else :
+## Pnorm=0
+## return Pnorm
+
+def applyTF(x_in, TF):
+
+ X = []
+ P = []
+ for (x,p) in TF:
+ X.append(x)
+ P.append(p)
+
+
+ Pmax=max(P)
+ precision = 0.001
+ #calculate precision of values entered
+ for i in range(len(X)):
+ d1 = Decimal(str(X[i]))
+ d2 = Decimal(str(P[i]))
+ d1expo = d1.as_tuple().exponent
+ d2expo = d2.as_tuple().exponent
+ expo = np.minimum(d1expo,d2expo)
+ precision = min(10**(expo-1),precision)
+
+
+ #change to array type for consistency
+ X = np.array(X)
+ P = np.array(P)
+
+    #interpolate between values so that precise wind speed data doesn't output heavily discretized power levels
+ from scipy import interpolate
+ finterp = interpolate.interp1d(X,P, kind='linear')
+ Xmin = min(X)
+ Xmax = max(X)
+ Xnew = np.arange(Xmin,Xmax,precision)
+ Pnew = finterp(Xnew)
+
+ #calculate power by applying transfer function
+ if x_in >= Xmax-precision:
+ index = len(Pnew)-1
+ elif x_in <= Xmin + precision:
+ index = 0
+ else:
+ index = int(round((x_in-Xmin)/precision))
+ Power = Pnew[index]
+
+ PowerNorm = Power/Pmax #normalize
+
+ return PowerNorm
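+# Example with a hypothetical transfer function mapping wind speed (m/s) to power (kW):
+#   applyTF(8.0, [(0, 0), (5, 100), (12, 1000), (25, 1000)])
+# interpolates the curve at 8 m/s and returns the power normalized by max(P).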
+
+
+
+def eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss = 5):
+
+ '''
+
+ Reconstitute wind production from wind speed histories for a single site.
+
+ syntax:
+        ACPower = eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss=5)
+
+ inputs:
+ WS: numpy array of wind speed measurements to be converted to production values
+ z_WS: height, in meters above ground level, of the wind speed measurements
+ pathWT: location of selected wind turbine technology's power curve file in computer file system
+ HH: wind turbine hub height
+ alpha (optional, default = 1/7): exponential factor describing the vertical wind profile; used to extrapolate
+ wind data to hub height. Can be scalar or vector with same length as wind data.
+ PercentLoss (optional, default = 5): percent loss due to multiple effects: the wake effect of adjacent wind turbines,
+ cable resistance between wind turbine/farm and injection point, grid and turbine unavailability, extreme weather conditions, etc.
+
+ outputs:
+ ACPower: numpy array of normalized expected wind production for the given wind farm.
+
+ '''
+
+
+ #open and treat wind turbine data in .pow file
+ f = open(pathWT)
+ lines = f.readlines()
+ WTdata = {}
+ WTdata["model"] = lines[0][1:-2]
+ WTdata['diameter'] = float(lines[1][1:-2])
+ WTdata['CutInWindSpeed'] = float(lines[4][1:-2])
+ WTdata['CutOutWindSpeed'] = float(lines[3][1:-2])
+ WTdata['PowerCurve'] = {}
+ WTdata['PowerCurve']['WindSpeed'] = np.arange(0, 31)
+ WTdata['PowerCurve']['Power'] = [float(0)] #in kW
+ for i in range(5,35):
+ WTdata['PowerCurve']['Power'].append(float(lines[i][1:-2]))
+
+ WTdata['Pmax']=max(WTdata['PowerCurve']['Power'])
+
+ #insert WT hub height
+ WTdata['z'] = HH
+
+ #correct wind speed values for appropriate height
+ WS_hh = WS*(WTdata['z']/z_WS)**alpha #wind speed at hub height
+
+ #calculate precision of cut in and cut out windspeeds
+ d1 = Decimal(str(WTdata['CutInWindSpeed']))
+ d2 = Decimal(str(WTdata['CutOutWindSpeed']))
+ expo = np.minimum(d1.as_tuple().exponent, d2.as_tuple().exponent)
+ precision = 10**(expo-1)
+
+ #insert points for cut-in and cut-out wind speeds
+ add_ci = 0
+ add_co= 0
+ if np.mod(WTdata['CutInWindSpeed'],1)==0:
+ add_ci = precision
+ if np.mod(WTdata['CutOutWindSpeed'],1)==0:
+ add_co = precision
+ i_cutin = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutInWindSpeed']+add_ci))[0][0]
+ i_cutout = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutOutWindSpeed']+add_co))[0][0] + 1 #+1 to account for addition of cut in point
+ WTdata['PowerCurve']['WindSpeed'] = list(WTdata['PowerCurve']['WindSpeed'])
+ WTdata['PowerCurve']['WindSpeed'].insert(i_cutin, WTdata['CutInWindSpeed']+add_ci)
+ WTdata['PowerCurve']['WindSpeed'].insert(i_cutout, WTdata['CutOutWindSpeed']+add_co)
+ WTdata['PowerCurve']['Power'].insert(i_cutin, 0)
+ WTdata['PowerCurve']['Power'].insert(i_cutout, 0)
+
+ #change to array type for consistency
+ WTdata['PowerCurve']['WindSpeed'] = np.array(WTdata['PowerCurve']['WindSpeed'])
+ WTdata['PowerCurve']['Power'] = np.array(WTdata['PowerCurve']['Power'])
+
+    #interpolate between values so that precise wind speed data doesn't output heavily discretized power levels
+ from scipy import interpolate
+ finterp = interpolate.interp1d(WTdata['PowerCurve']['WindSpeed'],WTdata['PowerCurve']['Power'], kind='linear')
+ Vnew = np.arange(0,30,precision)
+ Pnew = finterp(Vnew)
+
+ #calculate power produced by turbine in function of wind speed
+ Pac_turbine = []
+ for i, ws in enumerate(WS_hh):
+ if ws >= 30-precision:
+ index = len(Pnew)-1
+ else:
+ index = int(round(ws/precision)) #index of correct wind speed
+ Pac_turbine.append(Pnew[index]) #Power corresponds to value with same index as wind speed vector
+ Pac_turbine = np.array(Pac_turbine)
+
+ #account for Losses...currently a single loss factor but could imagine implementing a per-point method
+ #WakeEffects = 4 #3-8% for a typical farm, 0% for an individual windmill
+ #CableResistanceLosses = 1 #1-3% between windmills and electric counter, depending on voltage levels and cable length
+ #GridUnavalability = 1
+ #WTUnavailability = 3
+ #AcousticCurtailment = 1-4
+ #Freezing = 0.5
+ #LF = (1-WakeEffects/100)*(1-CableResistanceLosses/100) #loss factor
+ ACPower = Pac_turbine*(1-PercentLoss/100.0) #total AC power produced by the wind turbine; 100.0 avoids Python 2 integer division
+ ACPowerNorm = ACPower/WTdata['Pmax']
+ return ACPowerNorm
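+
+#Illustrative sketch only (not called by the simulation): shows how eol() could
+#convert measured wind speeds into normalized production. Arguments follow the
+#same positional pattern as the eol() call in PSSEFunct below
+#(WS, z_WS, pathWT, HH, alpha, PercentLoss); the .pow path is a hypothetical
+#placeholder.
+def _demo_eol():
+ WS10 = np.array([4.0, 7.5, 12.0, 26.0]) #wind speeds measured at 10 m above ground
+ Pnorm = eol(WS10, 10.0, r'C:\WT\ExampleTurbine.pow', 80.0, 1./7, 5.)
+ print Pnorm #normalized output, values in [0, 1]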
+
+def postOPFinitialization(sav_file,all_inputs_init,AdjLoadTables,init_gen=True,init_bus=True,init_fxshnt=True,init_swshnt=True,init_load=True,init_P0=False):
+
+ psspy.save(sav_file)
+
+ buses_init=all_inputs_init[0]
+ lines_init=all_inputs_init[1]
+ trans_init=all_inputs_init[2]
+ plants_init=all_inputs_init[3]
+ loads_init=all_inputs_init[4]
+ shunt_init=all_inputs_init[5]
+ motors_init=all_inputs_init[6]
+ trans3_init=all_inputs_init[7]
+ swshunt_init=all_inputs_init[8]
+
+ all_inputs_base=read_sav(sav_file)
+ buses_base=all_inputs_base[0]
+ lines_base=all_inputs_base[1]
+ trans_base=all_inputs_base[2]
+ plants_base=all_inputs_base[3]
+ loads_base=all_inputs_base[4]
+ shunt_base=all_inputs_base[5]
+ motors_base=all_inputs_base[6]
+ trans3_base=all_inputs_base[7]
+ swshunt_base=all_inputs_base[8]
+
+ _i=psspy.getdefaultint()
+ _f=psspy.getdefaultreal()
+ _s=psspy.getdefaultchar()
+
+ #re-initialize generators to original value
+ if init_gen:
+ for plant in plants_init:
+ busNum = plant[0]
+ ID = plant[2]
+ P = plant[3]
+ Q = plant[4]
+ psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
+ [P, Q,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+
+ #re-initialize voltages and angles
+ if init_bus:
+ for bus in buses_init:
+ busNum = bus[0]
+ Upu = bus[2]
+ angleDegrees = bus[7]
+ psspy.bus_chng_3(busNum,[_i,_i,_i,_i],[_f,Upu,angleDegrees,_f,_f,_f,_f],_s)
+
+ #re-initialize fixed shunts to original value
+ if init_fxshnt:
+ for shunt in shunt_base:
+ sh_init = next((sh for sh in shunt_init if (sh[0] == shunt[0]) and sh[5]==shunt[5]),'not found')
+ if sh_init == 'not found': #this means the added shunt is an adjustable bus shunt with no existing shunt at the same bus
+ #initialize the fixed shunt to 0
+ ID = shunt[5] #(ID always == 1)
+ busNum = shunt[0]
+ Qnom = 0
+ psspy.shunt_chng(busNum,ID,_i,[_f, Qnom])
+ else: #this shunt already existed in initial saved case file, re-initialize to initial value
+ ID = sh_init[5]
+ busNum = sh_init[0]
+ Q = sh_init[2]
+ Qnom = sh_init[4]
+ psspy.shunt_chng(busNum,ID,_i,[_f, Qnom])
+
+ #re-initialize switched shunts to original values
+ if init_swshnt:
+ for swshunt in swshunt_init:
+ busNum = swshunt[0]
+ Q = swshunt[2]
+ Qnom = swshunt[4]
+ psspy.switched_shunt_chng_3(busNum,[_i,_i,_i,_i,_i,_i,_i,_i,_i,_i,_i,_i],[_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,Qnom,_f],"")
+
+ #re-initialize loads to original values
+ if init_load:
+ # for load in loads_init:
+ # busNum = load[0]
+ # ID = load[5]
+ # P = load[1]
+ # Q = load[2]
+ # psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[P, Q,_f,_f,_f,_f])
+ for table in AdjLoadTables:
+ tableNum = table[0]
+ LoadMult = table[1]
+ psspy.opf_adjload_tbl(tableNum,[_i,_i,_i],[LoadMult,_f,_f,_f,_f,_f,_f])
+
+ #initialize dispatchable generators to P = 0
+ if init_P0:
+ for gen in GenDispatchData: #GenDispatchData is expected as a module-level global read from the .rop file
+ busNum = gen[0]
+ ID = gen[1]
+ dispatch = gen[2]
+ if dispatch>0:
+ P=0
+ psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
+ [P,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+
+ ##save changes
+ psspy.save(sav_file)
+
+ return
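+
+#Illustrative sketch only: a typical call restores the pre-OPF operating point
+#of every component class except the dispatchable-generator P=0 step.
+#all_inputs_init is the tuple returned by read_sav() on the untouched case and
+#AdjLoadTables comes from readOPFdata() on the .rop file, e.g.:
+# postOPFinitialization(r'Case_1.sav', all_inputs_init, AdjLoadTables,
+#                       init_gen=True, init_P0=False)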
+
+
+#Function that reads the data of interest and stores it in a matrix
+def read_sav(doc):
+ psspy.case(doc)
+ # Select what to report
+ if psspy.bsysisdef(0):
+ sid = 0
+ else: # Select subsystem with all buses
+ sid = -1
+
+ flag_bus = 2 #all buses 1 # in-service
+ flag_plant = 4 #all machines
+ flag_load = 4 #all loads 1 # in-service
+ flag_motor = 4 #all motors 1 # in-service
+ flag_swsh = 4 #all switched shunts 1 # in-service
+ flag_brflow = 1 # in-service
+ owner_brflow = 1 # bus, ignored if sid is -ve
+ ties_brflow = 5 # ignored if sid is -ve
+ entry = 1 # gives a single entry (each branch once)
+
+ #Bus data (number, basekV, pu, name, ...) : PSSe has 3 functions one for integer data, one for real data and one for strings
+ istrings = ['number']
+ ierr, idata = psspy.abusint(sid, flag_bus, istrings)
+ buses=idata
+
+ rstrings = ['base','pu']
+ ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
+ buses.append(rdata[0])
+ buses.append(rdata[1])
+
+ cstrings = ['name']
+ ierr, cdata = psspy.abuschar(sid, flag_bus, cstrings)
+ cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
+ buses.append(cdata[0])
+
+ rstrings = ['nvlmlo','nvlmhi']
+ ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
+ buses.append(rdata[0])
+ buses.append(rdata[1])
+
+ istrings = ['type']
+ ierr, idata = psspy.abusint(sid, flag_bus, istrings)
+ buses.append(idata[0])
+
+ rstrings = ['angled']
+ ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
+ buses.append(rdata[0])
+
+ buses=zip(*buses) # transpose the matrix
+
+ del idata, rdata, istrings, rstrings
+
+ #Lines data (from, to, amps, rate%a, ploss, qloss)
+ flag=2 #All non-transformer branches
+ istrings = ['fromnumber','tonumber']
+ ierr, idata = psspy.abrnint(sid, owner_brflow, ties_brflow, flag, entry, istrings)
+ lines=idata
+
+ rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
+ ierr, rdata = psspy.abrnreal(sid, owner_brflow, ties_brflow, flag, entry, rstrings)
+ for rc in range (np.matrix(rdata).shape[0]) :
+ lines.append(rdata[rc])
+
+ cstrings=['fromname','toname','id']
+ ierr, cdata = psspy.abrnchar(sid, owner_brflow, ties_brflow, flag, entry, cstrings)
+ for rc in range (np.matrix(cdata).shape[0]) :
+ cdata[rc]=map( lambda s: s.replace("\n"," "),cdata[rc])
+ lines.append(cdata[rc])
+
+ #eliminate breakers and switches
+ linesAll=zip(*lines) # transpose the matrix
+ lines = []
+ for line in linesAll:
+ if ('@' not in line[10]) and ('*' not in line[10]):
+ lines.append(line)
+
+ del idata, rdata, istrings, rstrings
+
+ #2-winding transformers data (from, to, amps, rate%a, ploss, qloss)
+ flag=6 #All transformer branches
+ istrings = ['fromnumber','tonumber']
+ ierr, idata = psspy.abrnint(sid, owner_brflow, ties_brflow, flag, entry, istrings)
+ transf=idata
+
+ rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
+ ierr, rdata = psspy.abrnreal(sid, owner_brflow, ties_brflow, flag, entry, rstrings)
+ for rc in range (np.matrix(rdata).shape[0]) :
+ transf.append(rdata[rc])
+
+ cstrings=['fromname','toname','id']
+ ierr, cdata = psspy.abrnchar(sid, owner_brflow, ties_brflow, flag, entry, cstrings)
+ for rc in range (np.matrix(cdata).shape[0]) :
+ cdata[rc]=map( lambda s: s.replace("\n"," "),cdata[rc])
+ transf.append(cdata[rc])
+
+ transf=zip(*transf) # transpose the matrix
+
+ del idata, rdata, istrings, rstrings
+
+ #3-winding transformers data (from, to, amps, rate%a, ploss, qloss)
+ #sid = -1 #assume a subsystem containing all buses in working case
+ owner_3flow = 1 #1 = use bus ownership 2 = use tfo ownership
+ ties_3flow = 3 #ignored because sid is negative; 3 = interior subsystem and subsystem tie 3-winding transformers
+ flag=3 #all 3 winding transfo windings
+ entry = 2 #1=winding 1 bus order, 2=transformer name order
+
+ istrings = ['wind1number','wind2number','wind3number', 'wndnum']
+ ierr, idata = psspy.awndint(sid, owner_3flow, ties_3flow, flag, entry, istrings)
+ transf3 = idata
+
+ rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
+ ierr, rdata = psspy.awndreal(sid, owner_3flow, ties_3flow, flag, entry, rstrings)
+ for rc in range (np.matrix(rdata).shape[0]) :
+ transf3.append(rdata[rc])
+
+ cstrings=['wind1name','wind2name','wind3name','id']
+ ierr, cdata = psspy.awndchar(sid, owner_3flow, ties_3flow, flag, entry, cstrings)
+ for rc in range (np.matrix(cdata).shape[0]) :
+ cdata[rc]=map( lambda s: s.replace("\n"," "),cdata[rc])
+ transf3.append(cdata[rc])
+
+ transf3=zip(*transf3) # transpose the matrix
+
+ del idata, rdata, istrings, rstrings
+
+
+ #Machines data (bus, inservice, number, pgen, qgen, mvabase, pmax, qmax, name)
+ istrings = ['number','status']
+ ierr, idata = psspy.amachint(sid, flag_plant, istrings)
+ plants=idata
+
+ cstrings = ['id']
+ ierr, cdata = psspy.amachchar(sid, flag_plant, cstrings)
+ for rc in range (np.matrix(cdata).shape[0]) :
+ plants.append(cdata[rc])
+
+ rstrings = ['pgen','qgen','mbase','pmax','qmax']
+ ierr, rdata = psspy.amachreal(sid, flag_plant, rstrings)
+ for rc in range (np.matrix(rdata).shape[0]) :
+ plants.append(rdata[rc])
+
+ cstrings = ['name']
+ ierr, cdata = psspy.amachchar(sid, flag_plant, cstrings)
+ cdata[0]= map( lambda s: s.replace("\n"," "),cdata[0])
+ plants.append(cdata[0])
+
+ rstrings = ['pmin','qmin']
+ ierr, rdata = psspy.amachreal(sid, flag_plant, rstrings)
+ for rc in range (np.matrix(rdata).shape[0]) :
+ plants.append(rdata[rc])
+
+ istrings = ['wmod']
+ ierr, idata = psspy.amachint(sid, flag_plant, istrings)
+ for rc in range (np.matrix(idata).shape[0]) :
+ plants.append(idata[rc])
+
+ nb_plants=np.matrix(plants).shape[1]
+ for rc in range (0,nb_plants) :
+ plants[3][rc]=float(plants[3][rc]*int(plants[1][rc])) # If the plant isn't in service its production is fixed to zero
+ plants[4][rc]=float(plants[4][rc]*int(plants[1][rc])) # If the plant isn't in service its production is fixed to zero
+
+ plants=zip(*plants) # transpose the matrix
+
+ del idata, rdata, cdata
+
+ #Loads data (bus, active, reactive, status, name, id)
+ istrings = ['number']
+ ierr, idata = psspy.aloadint(sid, flag_load, istrings)
+ loads=idata
+
+ xstrings = ['mvaact']
+ ierr, xdata = psspy.aloadcplx(sid, flag_load, xstrings)
+ loads.append(np.real(xdata)[0]) # Append the real part of the load
+ loads.append(np.imag(xdata)[0]) #Append the imaginary part of the load
+
+ istrings = ['status']
+ ierr, idata = psspy.aloadint(sid, flag_load, istrings)
+ loads.append(idata[0])
+
+ cstrings = ['name', 'id']
+ ierr, cdata = psspy.aloadchar(sid, flag_load, cstrings)
+ cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
+ loads.append(cdata[0])
+ loads.append(cdata[1])
+
+ nb_loads=np.matrix(loads).shape[1]
+ for rc in range (0,nb_loads) :
+ loads[1][rc]=float(loads[1][rc]*int(loads[3][rc])) # If the load isn't in service its consumption is fixed to zero
+ loads[2][rc]=float(loads[2][rc]*int(loads[3][rc])) # If the load isn't in service its consumption is fixed to zero
+
+ loads=zip(*loads) # transpose the matrix
+
+ del idata, cdata, xdata
+
+ #Fixed shunt data (number, MVAR, name, ...)
+ istrings = ['number','status']
+ ierr, idata = psspy.afxshuntint(sid, flag_bus, istrings)
+ shunt=idata
+
+ xstrings = ['shuntact']
+ ierr, xdata = psspy.afxshuntcplx(sid, flag_bus, xstrings)
+ shunt.append(np.imag(xdata)[0]) #Append the imaginary part of the shunt
+
+ cstrings = ['name']
+ ierr, cdata = psspy.afxshuntchar(sid, flag_bus, cstrings)
+ cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
+ shunt.append(cdata[0])
+
+ xstrings = ['shuntnom']
+ ierr, xdata = psspy.afxshuntcplx(sid, flag_bus, xstrings)
+ shunt.append(np.imag(xdata)[0]) #Append the imaginary part of the shunt
+
+ cstrings = ['id']
+ ierr, cdata = psspy.afxshuntchar(sid, flag_bus, cstrings)
+ cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
+ shunt.append(cdata[0])
+
+ nb_sh=np.matrix(shunt).shape[1]
+ for rc in range (0,nb_sh) : # If the fixed shunt isn't in service its MVAR is fixed to zero
+ shunt[2][rc]=float(shunt[2][rc]*int(shunt[1][rc]))
+ shunt[4][rc]=float(shunt[4][rc]*int(shunt[1][rc]))
+
+ shunt=zip(*shunt) # transpose the matrix
+
+ del idata, cdata, xdata
+
+ #Switched shunt data (number, MVAR, name, ...)
+ istrings = ['number','status']
+ ierr, idata = psspy.aswshint(sid, flag_swsh, istrings)
+ swshunt=idata
+ status = np.array(swshunt[1])
+
+
+ rstrings = ['bswact']
+ ierr, rdata = psspy.aswshreal(sid, flag_swsh, rstrings)
+ swshunt.append(rdata[0]) #Append the actual switched shunt susceptance (MVAR)
+
+ cstrings = ['name']
+ ierr, cdata = psspy.aswshchar(sid, flag_swsh, cstrings)
+ #cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
+ swshunt.append(cdata[0])
+
+ rstrings = ['bswnom']
+ ierr, rdata = psspy.aswshreal(sid, flag_swsh, rstrings)
+ swshunt.append(rdata[0]) #Append the nominal switched shunt susceptance (MVAR)
+
+ nb_swsh=np.matrix(swshunt).shape[1]
+ for rc in range (0,nb_swsh) : # If the swshunt isn't in service its MVAR is fixed to zero
+ swshunt[2][rc]=float(swshunt[2][rc]*int(swshunt[1][rc]))
+ swshunt[4][rc]=float(swshunt[4][rc]*int(swshunt[1][rc]))
+
+ swshunt=zip(*swshunt) # transpose the matrix
+
+ del idata, cdata, rdata
+
+ #Motors data (bus, active, reactive, status, name, id)
+ istrings = ['number']
+ ierr, idata = psspy.aindmacint(sid, flag_motor, istrings)
+ motors=idata
+
+ rstrings = ['p','q']
+ ierr, rdata = psspy.aindmacreal(sid, flag_motor, rstrings)
+ motors.append(rdata[0]) #Append the motor active power
+ motors.append(rdata[1]) #Append the motor reactive power
+
+ istrings = ['status']
+ ierr, idata = psspy.aindmacint(sid, flag_motor, istrings)
+ motors.append(idata[0])
+
+ cstrings = ['name', 'id']
+ ierr, cdata = psspy.aindmacchar(sid, flag_motor, cstrings)
+ cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
+ motors.append(cdata[0])
+ motors.append(cdata[1])
+
+ nb_motors=np.matrix(motors).shape[1]
+ for rc in range (0,nb_motors) :
+ motors[1][rc]=float(motors[1][rc]*int(motors[3][rc])) # If the load isn't in service its consumption is fixed to zero
+ motors[2][rc]=float(motors[2][rc]*int(motors[3][rc])) # If the load isn't in service its consumption is fixed to zero
+
+ motors=zip(*motors) # transpose the matrix
+
+ del idata, cdata, rdata
+
+ return buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt
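+
+#Illustrative sketch only: read_sav() returns nine parallel tables; the .sav
+#path below is a hypothetical placeholder.
+def _demo_read_sav():
+ tables = read_sav(r'C:\Example\BaseCase.sav')
+ buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt = tables
+ for bus in buses:
+  print bus[0], bus[3], bus[2] #bus number, name, voltage (pu)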
+
+def MyLogger(x,y,z,logCSVfilename,ite):
+ f=open(logCSVfilename, 'a')
+ f.write(str(ite)+';')
+ f.write(";")
+ nx = len(x)
+ for i in range(0,nx):
+ f.write(str(x[i]))#f.write("%f;" % (x[i]))
+ f.write(";")
+ f.write(";")
+ nz = len(z)
+ for i in range(0,nz):
+ try:
+ f.write("%f;" % (z[i]))
+ except:
+ f.write(str(z[i])+";")
+ f.write(";")
+ ny = len(y)
+ for j in range(0,ny):
+ f.write("%f;" % (y[j]))
+ f.write("\n")
+ f.close()
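+
+#Illustrative note: a MyLogger row has the layout
+# ite;;x1;x2;...;;z1;z2;...;;y1;y2;...
+#with an empty field separating the iteration counter, the inputs x, the
+#contingency info z and the outputs y.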
+
+# Function that writes one CSV output file per type of output quantity
+def MyMultiLogger (x, y, sizeY, z, ite, folder, day, fich, hour):
+ global ny
+ y0=0
+ for fich in range (np.size(sizeY,0)):
+ multilogfilename=folder+"\N"+day+"\Y"+str(fich)+"simulationDClog_"+hour+".csv"
+ f=open(multilogfilename, 'a')
+ f.write("%f;" % (ite))
+ f.write(";")
+ nx = len(x)
+ for i in range(0,nx):
+ f.write("%f;" % (x[i]))
+ f.write(";")
+ nz = len(z)
+ for i in range(0,nz):
+ f.write("%f;" % (z[i]))
+ f.write(";")
+ ny = sizeY[fich]
+ for j in range(0,ny):
+ f.write("%f;" % (y[j+y0]))
+ f.write("\n")
+ f.close()
+ y0 += ny
+ print( "Fichiers "+str(ite)+" enregistres\n\n")
+
+# Graphical analyses
+def graphical_out (inputSample, outputSampleAll, inputDim, outputDim, montecarlosize) :
+ print "\n\n\n Writing graphical analysis files..."
+ # A Pairwise scatter plot of the inputs
+ myGraph = Graph()
+ myPairs = Pairs(inputSample, 'Inputs relations', inputSample.getDescription(), "red", "bullet")
+ myGraph.add(Drawable(myPairs))
+ myGraph.draw("Input Samples",640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print 'Input pairwise scatterplot done...'
+
+ # A Pairwise scatter plot of the outputs
+ myGraph = Graph()
+ myPairs = Pairs(outputSampleAll, 'Output relations', outputSampleAll.getDescription(), "red", "bullet")
+ myGraph.add(Drawable(myPairs))
+ myGraph.draw("Output Samples",640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print 'Output pairwise scatterplot done...'
+
+ # A Pairwise scatter plot of the inputs/outputs
+ # Draw all scatter plots yj vs xi
+ for j in range(outputDim):
+ outputSamplej=outputSampleAll.getMarginal(j)
+ Ylabelstr=outputSamplej.getDescription()[0]
+ for i in range(inputDim):
+ inputSamplei=inputSample.getMarginal(i)
+ Xlabelstr=inputSamplei.getDescription()[0]
+ X=NumericalSample(montecarlosize,2)
+ for k in range(montecarlosize):
+ X[k,0]=inputSamplei[k][0]
+ X[k,1]=outputSamplej[k][0]
+ myGraph = Graph()
+ myCloud=Cloud(X);
+ mytitle=Ylabelstr+" vs "+Xlabelstr
+ myGraph.add(Drawable(myCloud))
+ myGraph.setAxes(1)
+ myGraph.setXTitle(Xlabelstr)
+ myGraph.setYTitle(Ylabelstr)
+ myGraph.draw(mytitle,640,480,GraphImplementation.PDF)
+ #ViewImage(myGraph.getBitmap())
+ print 'Input/Output pairwise scatterplot done...'
+
+ # A histogram of the inputs
+ for i in range(inputDim):
+ inputSamplei=inputSample.getMarginal(i)
+ myGraph = VisualTest.DrawHistogram(inputSamplei)
+ labelarray=inputSamplei.getDescription()
+ labelstr=labelarray[0]
+ myGraph.setTitle(labelstr)
+ myGraph.setName(labelstr)
+ myGraph.setXTitle(labelstr)
+ myGraph.setYTitle("Frequency")
+ myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print 'Input histogram done...'
+
+ # A histogram of the outputs
+ for j in range(outputDim):
+ outputSamplej=outputSampleAll.getMarginal(j)
+ myGraph = VisualTest.DrawHistogram(outputSamplej)
+ labelarray=outputSamplej.getDescription()
+ labelstr=labelarray[0]
+ myGraph.setTitle(labelstr)
+ myGraph.setName(labelstr)
+ myGraph.setXTitle(labelstr)
+ myGraph.setYTitle("Frequency")
+ myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print 'Output histogram done'
+ print 'Graphical output terminated'
+
+
+def config_contingency(LinesList,GroupsList,TransformersList,LoadsList,MotorsList) :
+
+ lines_con=[]
+ groups_con=[]
+ loads_con = []
+ transfos_con = []
+ motors_con = []
+ sizeLines = len(LinesList)
+ sizeGroups = len(GroupsList)
+ sizeTransfos = len(TransformersList)
+ sizeLoads = len(LoadsList)
+ sizeMotors = len(MotorsList)
+ val=[]
+ prob=[]
+
+ for i in range(sizeLines+sizeGroups+sizeTransfos + sizeLoads + sizeMotors) :
+ val.append(int(i))
+ for i in range (sizeLines) :
+ lines_con.append(LinesList[i][0])
+ prob.append(LinesList[i][1])
+ for i in range (sizeGroups) :
+ prob.append(GroupsList[i][1])
+ groups_con.append(GroupsList[i][0])
+ for i in range (sizeTransfos) :
+ prob.append(TransformersList[i][1])
+ transfos_con.append(TransformersList[i][0])
+ for i in range (sizeLoads) :
+ prob.append(LoadsList[i][1])
+ loads_con.append(LoadsList[i][0])
+ for i in range (sizeMotors) :
+ prob.append(MotorsList[i][1])
+ motors_con.append(MotorsList[i][0])
+
+ return lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob
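+
+#Illustrative sketch only: each *List entry is [name, probability]; the names
+#below are hypothetical.
+def _demo_config_contingency():
+ LinesList = [['LINE_A', 0.01], ['LINE_B', 0.02]]
+ GroupsList = [['GEN_1', 0.05]]
+ out = config_contingency(LinesList, GroupsList, [], [], [])
+ lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob = out
+ print val #[0, 1, 2] : one index per potential contingency
+ print prob #[0.01, 0.02, 0.05] : matching probabilities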
+
+##def config_contingency(LinesPath,GeneratorsPath,TransformersPath,LoadsPath) :
+##
+## lines_con=[]
+## groups_con=[]
+## loads_con = []
+## transfos_con = []
+##
+## # Loading of lines contingency configuration
+## if LinesPath != '':
+## f=open(LinesPath,"r")
+## lines=f.readlines()
+## f.close()
+## for i in range (len(lines)) :
+## line=lines[i].split(";")
+## try :
+## int(line[1])
+## except ValueError :
+## pass
+## else :
+## if line[0] == '' :
+## line[0] = '0'
+## lines_con.append([int(line[1]), int(line[3]), str(line[5]),float(line[0].replace(',','.'))])
+##
+## # Loading of transformers contingency configuration
+## if TransformersPath != '':
+## f=open(TransformersPath,"r")
+## lines=f.readlines()
+## f.close()
+## for i in range (len(lines)) :
+## line=lines[i].split(";")
+## try :
+## int(line[1])
+## except ValueError :
+## pass
+## else :
+## if line[0] == '' :
+## line[0] = '0'
+## transfos_con.append([int(line[1]), int(line[3]), str(line[5]),float(line[0].replace(',','.'))])
+##
+## # Loading of groups contingency configuration
+## if GeneratorsPath != '':
+## f=open(GeneratorsPath,"r")
+## lines=f.readlines()
+## f.close()
+## for i in range (len(lines)) :
+## line=lines[i].split(";")
+## try :
+## int(line[1])
+## except ValueError :
+## pass
+## else :
+## if line[0] == '' :
+## line[0] = '0'
+## groups_con.append([int(line[1]), int(line[3]),float(line[0].replace(',','.'))])
+##
+## # Loading of loads contingency configuration
+## if LoadsPath != '':
+## f=open(LoadsPath,"r")
+## lines=f.readlines()
+## f.close()
+## for i in range (len(lines)) :
+## line=lines[i].split(";")
+## try :
+## int(line[1])
+## except ValueError :
+## pass
+## else :
+## if line[0] == '' :
+## line[0] = '0'
+## loads_con.append([int(line[1]), int(line[3]), float(line[0].replace(',','.'))])
+##
+## sizeLines = len(lines_con)
+## sizeGroups = len(groups_con)
+## sizeTransfos = len(transfos_con)
+## sizeLoads = len(loads_con)
+## val=[]
+## prob=[]
+## for i in range(sizeLines+sizeGroups+sizeTransfos + sizeLoads) :
+## val.append(int(i))
+##
+## for i in range (sizeLines) :
+## prob.append(lines_con[i][3])
+## for i in range (sizeGroups) :
+## prob.append(groups_con[i][2])
+## for i in range (sizeTransfos) :
+## prob.append(transfos_con[i][3])
+## for i in range (sizeLoads) :
+## prob.append(loads_con[i][2])
+## return lines_con, groups_con, transfos_con, loads_con, val, prob
+
+def LoadARMA(time_serie_file, time_serie_SS, time_serie_TH) :
+ f=open(time_serie_file,"r")
+ lines=f.readlines()
+ N=len(lines)
+ Xt=[]
+ for i in range(N) :
+ Xt.append([float(lines[i])])
+
+ myTG=RegularGrid(0,float(time_serie_SS),N)
+ TS=TimeSeries(myTG,NumericalSample(Xt))
+ myWN=WhiteNoise(Distribution(Normal(0,1)),myTG)
+ myState=ARMAState(TS.getSample(),NumericalSample())
+ p=12
+ q=0
+ d=1
+ myFactory = ARMALikelihoodFactory ( p , q , d )
+ myARMA = myFactory.build(TS)
+
+ myARMA.setState(myState)
+
+ AR = myARMA.getARCoefficients()
+ MA = myARMA.getMACoefficients()
+
+ ts = myARMA.getRealization()
+ ts.setName('A realization')
+ myTSGraph=ts.drawMarginal(0)
+ myTSGraph.draw('Realization'+str(p)+","+str(q),640,480,GraphImplementation.PDF)
+ myARMAState=myARMA.getState()
+
+ #Make a prediction of the future on next Nit instants
+ Nit = int(time_serie_TH)
+ myARMA2=ARMA(AR,MA,myWN,myARMAState)
+ possibleFuture=myARMA2.getFuture(Nit)
+ possibleFuture.setName('Possible future')
+
+ Xt2=[]
+ for i in range (len(possibleFuture)):
+ Xt2.append(possibleFuture.getValueAtIndex(i)[0])
+ Max=float(max(Xt2))
+ Min=float(min(Xt2))
+ h=float(Max-Min)
+ for i in range (len(possibleFuture)):
+ value= (Xt2[i]-Min+h/3)/(Max-Min+h/3)
+ possibleFuture.setValueAtIndex(i,NumericalPoint(1,value))
+
+ myFG=possibleFuture.drawMarginal(0)
+ myFG.draw('Future'+str(Nit),640,480,GraphImplementation.PDF)
+
+ return possibleFuture
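+
+#Note on the rescaling in LoadARMA: with h = Max-Min, the map
+#(Xt2[i]-Min+h/3)/(Max-Min+h/3) sends the forecast into [1/4, 1]; e.g. for
+#Min=0, Max=3 the smallest value maps to 0.25 and the largest to 1.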
+
+def LoadTS(time_serie_file) :
+ TS=[]
+ for i in range(len(time_serie_file)) :
+ if time_serie_file[i] == -1 :
+ pass
+ else :
+ f=open(time_serie_file[i],"r")
+ lines=f.readlines()
+ N=len(lines)
+ Xt=[]
+ for j in range(N) :
+ try :
+ float(lines[j])
+ except ValueError :
+ lines[j] = commaToPoint(lines[j])
+ else :
+ pass
+ Xt.append([float(lines[j])])
+ TS.append(Xt)
+ return TS
+
+
+def KSDist(lines) :
+ print "Creating Kernel Smoothing distribution "
+ N=len(lines)
+ Xt=[]
+ for i in range(N) :
+ if lines[i] == "\n" :
+ print "End of file"
+ break
+ else :
+ try :
+ float(lines[i])
+ except ValueError :
+ lines[i] = commaToPoint(lines[i])
+ else :
+ pass
+ Xt.append([float(lines[i])])
+ NS=NumericalSample(Xt)
+ kernel=KernelSmoothing(Uniform())
+ myBandwidth = kernel.computeSilvermanBandwidth(NS)
+ KS=kernel.build(NS,myBandwidth,1)
+ return KS
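+
+#Illustrative note: KSDist() expects the raw lines of a one-column data file
+#(as returned by readlines()); e.g. KSDist(['0,95\n', '1,02\n', '0,88\n'])
+#converts decimal commas to points and builds a kernel-smoothed distribution.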
+
+
+def threshold (inputRandomVector, outputVariableOfInterest,pssefun,inputDistribution) :
+ # We create a quadraticCumul algorithm
+ myQuadraticCumul = QuadraticCumul(outputVariableOfInterest)
+
+ # We compute the several quantities provided by the quadratic cumul algorithm
+ # and count the number of function evaluations needed
+ nbBefr = pssefun.getEvaluationCallsNumber()
+
+ # Mean first order
+ meanFirstOrder = myQuadraticCumul.getMeanFirstOrder()[0]
+ nbAfter1 = pssefun.getEvaluationCallsNumber()
+
+ # Mean second order
+ meanSecondOrder = myQuadraticCumul.getMeanSecondOrder()[0]
+ nbAfter2 = pssefun.getEvaluationCallsNumber()
+
+ # Standard deviation
+ stdDeviation = sqrt(myQuadraticCumul.getCovariance()[0,0])
+ nbAfter3 = pssefun.getEvaluationCallsNumber()
+
+ print "First order mean=", myQuadraticCumul.getMeanFirstOrder()[0]
+ print "Evaluation calls number = ", nbAfter1 - nbBefr
+ print "Second order mean=", myQuadraticCumul.getMeanSecondOrder()[0]
+ print "Evaluation calls number = ", nbAfter2 - nbAfter1
+ print "Standard deviation=", sqrt(myQuadraticCumul.getCovariance()[0,0])
+ print "Evaluation calls number = ", nbAfter3 - nbAfter2
+
+ print "Importance factors="
+ for i in range(inputRandomVector.getDimension()) :
+ print inputDistribution.getDescription()[i], " = ", myQuadraticCumul.getImportanceFactors()[i]
+ print ""
+
+def getUserDefined (values):
+ val = []
+ prob = []
+ for a in values:
+ val.append(a[0])
+ prob.append(a[1])
+ dim = len (val)
+
+ prob = map(float,prob)
+ prob = [p/sum(prob) for p in prob]
+
+## weights = NumericalPoint(prob)
+## Vals = []
+## for i in range(dim):
+## Vals.append([float(val[i]),float(val[i])+0.000001])
+## ranges = NumericalSample(Vals)
+## return UserDefined(ranges, weights)
+ coll = UserDefinedPairCollection()
+ for i in range (dim) :
+ UDpair=UserDefinedPair(NumericalPoint(1,float(val[i])),float(prob[i]))
+ coll.add(UDpair)
+ return UserDefined(coll)
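+
+#Illustrative note: getUserDefined([[10, 2], [20, 1]]) normalizes the weights
+#to [2/3., 1/3.] and returns a discrete law on the points 10 and 20.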
+
+
+def getHistogram (values) :
+ step = []
+ prob = []
+ for a in values:
+ step.append(a[0])
+ prob.append(a[1])
+ dim = len (step)
+ myHistogram = HistogramPairCollection(dim)
+ for i in range (dim) :
+ try:
+ myHistogram[i]=HistogramPair(float(step[i]),float(prob[i]))
+ except:
+ pass
+ return myHistogram
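+
+#Illustrative note: getHistogram([[1.0, 0.2], [2.0, 0.8]]) appears to build a
+#collection of (width, height) pairs, consumed by Histogram() in getUserLaw
+#below together with the 'First' abscissa.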
+
+
+
+def getUserLaw(LawDico):
+ time_serie = 0
+ time_serie_file = ''
+ time_serie_SS = 0
+ time_serie_TH = 0
+ if LawDico['Law']=="Normal":
+ law = Normal(float(LawDico['Mu']),float(LawDico['Sigma']))#Openturns
+ elif LawDico['Law']=="Uniform":
+ law=Uniform(float(LawDico['A']),float(LawDico['B']))
+ elif LawDico['Law']=="Exponential":
+ law=Exponential(float(LawDico['Lambda']),float(LawDico['Gamma']))
+ elif LawDico['Law']=="Weibull":
+ if LawDico['Settings']=='AlphaBeta':
+ law=Weibull(float(LawDico['Alpha']),float(LawDico['Beta']),float(LawDico['Gamma']))
+ elif LawDico['Settings']=='MuSigma':
+ law=Weibull(float(LawDico['Mu']),float(LawDico['Sigma']),float(LawDico['Gamma']),Weibull.MUSIGMA)
+ elif LawDico['Law']=="TruncatedNormal":
+ law=TruncatedNormal(float(LawDico['MuN']),float(LawDico['SigmaN']),float(LawDico['A']),float(LawDico['B']))
+ elif LawDico['Law']=="UserDefined":
+ law=UserDefined(getUserDefined (LawDico['Values']))
+ elif LawDico['Law']=="Histogram":
+ law=Histogram(LawDico['First'], getHistogram (LawDico['Values']))
+ elif LawDico['Law']=="PDF_from_file":
+ law=KSDist(LawDico['FileContents'])
+ elif LawDico['Law']=="TimeSeries_from_file":
+ law = Uniform(0.999999,1)
+ time_serie=1
+ time_serie_file=LawDico['FileContents']
+ else :
+ law = Uniform(0.999999,1)
+ return law, [time_serie, time_serie_file] #[time_serie, time_serie_file, time_serie_SS, time_serie_TH]
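+
+#Illustrative sketch only: builds an OpenTURNS law from a PSEN-style dictionary
+#using the keys handled above.
+def _demo_getUserLaw():
+ law, ts_info = getUserLaw({'Law': 'Normal', 'Mu': 0.8, 'Sigma': 0.1})
+ print law.getMean() #[0.8]
+ print ts_info #[0, ''] : not a time series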
+
+
+
+
+def contingency_automatic (dfxPath, acccPath, rate) :
+ psspy.accc_with_dsp_3( 0.5,[0,0,0,1,1,2,0,0,0,0,0],r"""ALL""",dfxPath,acccPath,"","","")
+ psspy.accc_single_run_report_4([1,int(rate),int(rate),1,1,0,1,0,0,0,0,0],[0,0,0,0,6000],[ 0.5, 5.0, 100.0,0.0,0.0,0.0, 99999.],acccPath)
+
+ rslt_summary=pssarrays.accc_summary(acccPath)
+ if int(rate) == 1 :
+ rate = rslt_summary.rating.a
+ elif int(rate) == 2 :
+ rate = rslt_summary.rating.b
+ elif int(rate) == 3 :
+ rate = rslt_summary.rating.c
+ else :
+ print "NO RATE CHOOSEN"
+
+ Labels=rslt_summary.colabel #column labels of the ACCC summary
+ contin_load=[]
+ for label in Labels :
+ t=[]
+ rslt=pssarrays.accc_solution(acccPath,label,'contingency',0.5,5.0) #assumed argument order: (accfile, colabel, stype, busmsm, sysmsm)
+ ampFlow=rslt.ampflow
+ for i in range (len(ampFlow)) :
+ t.append(ampFlow[i]/rate[i])
+ contin_load.append(t)
+ return contin_load
+
+def commaToPoint (string) :
+ stringReplaced = string.replace(',','.')
+ return stringReplaced
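+
+#e.g. commaToPoint('3,14\n') returns '3.14\n' (decimal-comma CSV support)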
+
+def PSSEFunct(dico,x):
+ if 1:
+ #try:
+## if dico['TStest']==1:
+## os.chdir(dico['doc_base']) #to work in right directory of the package
+## sys.stdout=open('process num'+str(os.getpid())+'_package '+\
+## str(dico['num_pac'])+'.out','w')
+
+ #Get all the dico values
+ TStest=dico['TStest']
+ sizeY0=dico['sizeY0']
+ sizeY1=dico['sizeY1']
+ sizeY2=dico['sizeY2']
+ sizeY3=dico['sizeY3']
+ sizeY4=dico['sizeY4']
+ sizeY5=dico['sizeY5']
+ sizeY6=dico['sizeY6']
+ sizeY7=dico['sizeY7']
+ sizeY8=dico['sizeY8']
+ sizeY=dico['sizeY']
+ Xt=dico['Xt']
+ folder=dico['folder']
+ folderN_1=dico['folderN_1']
+ day=dico['day']
+ doc_base=dico['doc_base']
+ PSSEParams=dico['PSSEParams']
+ _i=dico['_i']
+ _f=dico['_f']
+ _s=dico['_s']
+ if dico['PSSEParams']['I_MAX']=='RateA':
+ Irate_num=1
+ elif dico['PSSEParams']['I_MAX']=='RateB':
+ Irate_num=2
+ elif dico['PSSEParams']['I_MAX']=='RateC':
+ Irate_num=3
+ num_pac=dico['num_pac']
+ logCSVfilename=dico['logCSVfilename']
+ continLines=dico['continLines']
+ continGroups=dico['continGroups']
+ continTransfos=dico['continTransfos']
+ continLoads=dico['continLoads']
+ continMotors=dico['continMotors']
+ continVal=dico['continVal']
+ continProb=dico['continProb']
+ position=dico['position']
+ timeVect=dico['timeVect']
+ LawsList = dico['CorrMatrix']['laws']
+ all_inputs_init = dico['all_inputs_init']
+ AdjLoadTables = dico['AdjLoadTables']
+
+
+ #initializations
+ Output=[]
+ LS=[]
+ FS=[]
+ Pmachine=[]
+ LStable=[]
+ FStable=[]
+
+ LS_beforeUC=[]
+ FS_beforeUC=[]
+ Pmachine_beforeUC=[]
+ LStable_beforeUC=[]
+ FStable_beforeUC=[]
+ Output_beforeUC = []
+
+ outputSampleAll=NumericalSample(0,9)
+ inputSample=[]
+ redirect.psse2py()
+ #import pssdb
+ psspy.psseinit(80000)
+
+ # Silent execution of PSSe
+ islct=6 # 6=no output; 1=standard
+ psspy.progress_output(islct)
+
+
+ x_copy = []
+ for ite in range(len(x)):
+ xite = []
+ for j in range(len(x[ite])):
+ xite.append(x[ite][j])
+ x_copy.append(xite)
+
+
+ for ite in range(len(x)):
+
+ position+=1
+ os.chdir(doc_base) #to work in right directory of the package
+ # Load data from PSSe
+ psspy.case(doc_base+'/BaseCase.sav') #Launching of PSSE and opening the working file
+ all_inputs_base=read_sav(doc_base+'/BaseCase.sav')
+ buses_base=all_inputs_base[0]
+ lines_base=all_inputs_base[1]
+ transf_base=all_inputs_base[2]
+ plants_base=all_inputs_base[3]
+ loads_base=all_inputs_base[4]
+ shunt_base=all_inputs_base[5]
+ motors_base=all_inputs_base[6]
+ transf3_base=all_inputs_base[7]
+ swshunt_base=all_inputs_base[8]
+ #Calculate Losses:
+ P_load = 0
+ for load in loads_base:
+ P_load += load[1]
+ for motor in motors_base:
+ P_load+= motor[1]
+ P_gen = 0
+ for gen in plants_base:
+ busnum = gen[0]
+ genid = gen[2].strip()
+ pgen = gen[3]
+ P_gen+=pgen
+ Losses = P_gen - P_load
+ LossesRatio = (Losses/P_load)*1.25 #overestimate losses to avoid surpassing swing bus capacity after economic dispatch
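+ #e.g. 1000 MW of load and 1030 MW of generation give Losses = 30 MW and
+ #LossesRatio = 0.0375, i.e. the 3% actual ratio padded by 25%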
+ doci=os.path.join(doc_base,"Case_"+str(position)+".sav")
+ doci_beforeUC = os.path.join(doc_base,"Case_beforeUC_" + str(position) + ".sav")
+ psspy.save(doci)
+ # Total initial (fixed) shunt on buses
+ init_shunt = 0
+ for i in range(len(shunt_base)) :
+ init_shunt += float(shunt_base[i][2])
+ # Configure the OPF from the user parameters
+ TapChange = 1-int(dico['PSSEParams']['LOCK_TAPS']) #0 if locked, 1 if stepping
+ psspy.report_output(6,"",[0,0]) #6=no output
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'Got to OPF parametrization'
+ logfile = os.path.join(doc_base,r"""DETAIL""")
+ psspy.produce_opf_log_file(1,logfile)
+ psspy.opf_fix_tap_ratios(1-TapChange) #0 : do not fix transformer tap ratios
+ psspy.minimize_fuel_cost(int(dico['PSSEParams']['FUEL_COST']))
+ psspy.minimize_adj_bus_shunts(int(dico['PSSEParams']['MVAR_COST']))
+ psspy.minimize_load_adjustments(int(dico['PSSEParams']['LOADSHEDDING_COST']))
+ #psspy.minimize_load_adjustments(False) #block load adjustments during application of laws
+ #psspy.initial_opf_barrier_coeff(100)
+ #psspy.final_opf_barrier_coeff(0.0001)
+ #psspy.opf_step_length_tolerance(0.00001)
+ #psspy.opf_fix_all_generators(0)
+ psspy.set_opf_report_subsystem(3,1)
+ psspy.solution_parameters_4([PSSEParams['ITERATION_LIMIT'],PSSEParams['ITERATION_LIMIT'],PSSEParams['ITERATION_LIMIT'],_i,_i], [_f]*19)
+ #[1.6, 1.6, 1, 0.0001, 1, 1, 1, 0.00001, 5, 0.7, 0.0001, 0.005, 1, 0.05, 0.99, 0.99, 1, 0.0001, 100])
+
+ else: #economic dispatch
+ ecd_file = PSSEParams['ecd_file']
+ # 1. Apply the sampled values
+ nx = len(x[0])
+ if TStest==1 :
+ for i,law in enumerate(LawsList):
+ if Xt[ite][i] == -1 :
+ if law != 'N_1_fromFile':
+ if 'Availability' in dico['Laws'][law]['Type']:
+ status = int(round(x[ite][i])) #ideally a number in [0,1] was drawn, 0 and 1 included
+ status = min(status,1) #force status to take the value 0 or 1
+ status = max(status,0)
+ x_copy[ite][i]=status
+ if dico['Laws'][law]['ComponentType']=='Generator' and 'Level' in dico['Laws'][law]['Type']:
+ if dico['Laws'][law]['TransferFunction']==True:
+ if dico['Laws'][law]['TF_Input']=='.pow file':
+ z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
+ pathWT = dico['Laws'][law]['File_Name']
+ HH = dico['Laws'][law]['Hub_Height']
+ alpha = dico['Laws'][law]['AlphaWS']
+ PercentLoss = dico['Laws'][law]['Percent_Losses']
+ x_copy[ite][i]=eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
+ elif dico['Laws'][law]['TF_Input']=='tuples list':
+ x_copy[ite][i]=applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
+ else: #ensure values are between 0 and 1
+ Pval = x[ite][i]
+ Pval = min(Pval,1)
+ Pval = max(Pval,0)
+ x_copy[ite][i]=Pval
+ else: #law=='N_1_fromFile'
+ x_copy[ite][i]=int(floor(x[ite][i]))
+
+ else:
+ x_copy[ite][i]=float(Xt[ite][i]) # For a time-series study, take the value from Xt
+
+ else :
+ for i,law in enumerate(LawsList):
+ if law != 'N_1_fromFile':
+ if 'Availability' in dico['Laws'][law]['Type']:
+ status = int(round(x[ite][i])) #ideally a number in [0,1] was drawn, 0 and 1 included
+ status = min(status,1) #force status to take the value 0 or 1
+ status = max(status,0)
+ x_copy[ite][i]=status
+ if dico['Laws'][law]['ComponentType']=='Generator' and 'Level' in dico['Laws'][law]['Type']:
+ if dico['Laws'][law]['TransferFunction']==True:
+ if dico['Laws'][law]['TF_Input']=='.pow file':
+ z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
+ pathWT = dico['Laws'][law]['File_Name']
+ HH = dico['Laws'][law]['Hub_Height']
+ alpha = dico['Laws'][law]['AlphaWS']
+ PercentLoss = dico['Laws'][law]['Percent_Losses']
+ x_copy[ite][i]=eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
+ #x_copy[ite][i]=x[ite][i]
+ elif dico['Laws'][law]['TF_Input']=='tuples list':
+ x_copy[ite][i]=applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
+ else: #ensure values are between 0 and 1
+ Pval = x[ite][i]
+ Pval = min(Pval,1)
+ Pval = max(Pval,0)
+ x_copy[ite][i]=Pval
+ else: #law=='N_1_fromFile'
+ x_copy[ite][i]=int(floor(x[ite][i]))
+ inputSample.append(np.array(x[ite]))
+
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ #get OPF data
+ allbus=1
+ include = [1,1,1,1] #isolated buses, out of service branches, subsystem data, subsystem tie lines
+ out = 0 #out to file, not window
+ # if psspy.bsysisdef(0):
+ # sid = 0
+ # else: # Select subsytem with all buses
+ # sid = -1
+ sid = 3
+ RopFile = os.path.join(dico['doc_base'],"BaseCase.rop" )
+ AlreadyRop=os.path.isfile(RopFile)
+ if not AlreadyRop:
+ ierr = psspy.rwop(sid,allbus,include,out,RopFile) #write rop file
+
+ GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(RopFile)
+
+ if Debug:
+ print "Starting application of laws"
+
+ # 2. Perform the calculation with PSSE
+
+ #Editing some values in the PSSE .sav input file
+ x2 = [] #list to store sampled values for logger function
+ for i,law in enumerate(LawsList):
+ if law != 'N_1_fromFile':
+
+ #Reserve Constraint Law: change level of required reserve for a period reserve constraint
+ if dico['Laws'][law]['ComponentType']=='Reserve Constraint':
+ ReserveID = dico['Laws'][law]['ReserveID']
+ ReserveFound = False
+ ReserveActive=False
+ for PRD in PeriodReserveData:
+ if PRD[0] == ReserveID:
+ ReserveFound=True
+ ReserveActive=PRD[3]
+ if not ReserveFound:
+ print 'ALERT: ReserveID ', str(ReserveID), ' is not found. User must define the period reserve in the .sav file before including a distribution on the reserve constraint in PSEN.'
+ elif not ReserveActive:
+ print 'ALERT: Spinning Reserve Correction entered in PSEN, but ReserveID ', str(ReserveID), ' is not activated in PSS/E.'
+ else:
+ status=_i #enabled/not enabled
+ level=x_copy[ite][i] #MW
+ timeframe = _f #minutes
+ psspy.opf_perrsv_main(ReserveID,status,[level, timeframe]) #change reserve constraint level
+ x2.append(x_copy[ite][i]) #store values for logger function
+
+ # Load Law: change the values of the different loads and treat large changes of load to help convergence
+ #if dico['Laws'][law]['ComponentType']=='Load' and ('N_1' not in law) and ('out' not in law.lower()):
+ if dico['Laws'][law]['ComponentType']=='Load' and ('Availability' not in dico['Laws'][law]['Type']):
+ LoadList = dico['Laws'][law]['Load']
+ if x_copy[ite][i] > 0.75 : # Change the load(s) directly
+ for LoadName in LoadList:
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_copy[ite][i]*P,x_copy[ite][i]*Q,_f,_f,_f,_f])
+
+ elif x_copy[ite][i] > 0.4 : # Pre-treat by first applying an intermediate load level
+ for LoadName in LoadList:
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[(1+x_copy[ite][i])/2*P,(1+x_copy[ite][i])/2*Q,_f,_f,_f,_f])
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'OPF load 1'
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # Run the OPF
+ postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+ else:
+ if Debug:
+ print "Economic Dispatch load 1"
+ #economic dispatch
+ EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ if Debug:
+ print "Returned from EconomicDispatch"
+ if np.any(np.array(EcdErrorCodes)!=0):
+ print "Error in economic dispatch."
+ for LoadName in LoadList : # Change all the loads
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_copy[ite][i]*P,x_copy[ite][i]*Q,_f,_f,_f,_f])
+
+ else : # Pre-treat by stepping through intermediate load levels
+ for LoadName in LoadList:
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[0.7*P,0.7*Q,_f,_f,_f,_f])
+
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'OPF load 2a'
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # Run the OPF
+ postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+ else:
+ if Debug:
+ print "Economic Dispatch load 2"
+ #economic dispatch
+ EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ if np.any(np.array(EcdErrorCodes)!=0):
+ print "Error in economic dispatch."
+
+ for LoadName in LoadList : # Change all the loads
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[0.4*P,0.4*Q,_f,_f,_f,_f])
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'OPF load 2b'
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # Run the OPF
+ postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+ else:
+ #economic dispatch
+ EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ if np.any(np.array(EcdErrorCodes)!=0):
+ print "Error in economic dispatch."
+ if Debug:
+ print "Economic Dispatch load 2"
+ for LoadName in LoadList : # Change all the loads
+ busNum = dico['Loads'][LoadName]['NUMBER']
+ ID = dico['Loads'][LoadName]['ID']
+ P = dico['Loads'][LoadName]['P']
+ Q = dico['Loads'][LoadName]['Q']
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_copy[ite][i]*P,x_copy[ite][i]*Q,_f,_f,_f,_f])
+ x2.append(x_copy[ite][i]) #store values sampled for logger function
+ # Motor Load Law: change the values of the different induction motor loads and treat large changes of load to help convergence
+ #if dico['Laws'][law]['ComponentType']=='Motor' and ('N_1' not in law) and ('out' not in law.lower()):
+ if dico['Laws'][law]['ComponentType']=='Motor' and ('Availability' not in dico['Laws'][law]['Type']):
+ MotorList = dico['Laws'][law]['Motor']
+ if x_copy[ite][i] > 0.75 : # Change the motor load(s) directly
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Mbase = dico['Motors'][MotorName]['MBASE']
+ BaseCode = dico['Motors'][MotorName]['BASECODE']
+ Pinit = dico['Motors'][MotorName]['P']
+ Qinit = dico['Motors'][MotorName]['Q']
+ if BaseCode==2: #max is in MVA
+ PF = Pinit/((Pinit**2+Qinit**2)**0.5)
+ Pmax = PF*Mbase
+ else:
+ Pmax = Mbase
+ I_list = [_i]*9
+ F_list = [_f]*23
+ F_list[2]=x_copy[ite][i]*Pmax
+ psspy.induction_machine_chng(busNum,ID,I_list,F_list)
+
+ elif x_copy[ite][i] > 0.4 : # Pre-treat by first applying an intermediate motor load level
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Mbase = dico['Motors'][MotorName]['MBASE']
+ BaseCode = dico['Motors'][MotorName]['BASECODE']
+ Pinit = dico['Motors'][MotorName]['P']
+ Qinit = dico['Motors'][MotorName]['Q']
+ if BaseCode==2: #max is in MVA
+ PF = Pinit/((Pinit**2+Qinit**2)**0.5)
+ Pmax = PF*Mbase
+ else:
+ Pmax = Mbase
+ I_list = [_i]*9
+ F_list = [_f]*23
+ F_list[2]=x_copy[ite][i]*Pmax*0.7
+ psspy.induction_machine_chng(busNum,ID,I_list,F_list)
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'OPF motor load 1'
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # Run the OPF
+ postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+ else:
+ #economic dispatch
+ EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ if np.any(np.array(EcdErrorCodes)!=0):
+ print "Error in economic dispatch."
+
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Mbase = dico['Motors'][MotorName]['MBASE']
+ BaseCode = dico['Motors'][MotorName]['BASECODE']
+ Pinit = dico['Motors'][MotorName]['P']
+ Qinit = dico['Motors'][MotorName]['Q']
+ if BaseCode==2: #max is in MVA
+ PF = Pinit/((Pinit**2+Qinit**2)**0.5)
+ Pmax = PF*Mbase
+ else:
+ Pmax = Mbase
+ I_list = [_i]*9
+ F_list = [_f]*23
+ F_list[2]=x_copy[ite][i]*Pmax
+ psspy.induction_machine_chng(busNum,ID,I_list,F_list)
+
+ else : # Pre-treat by stepping through intermediate motor load levels
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Mbase = dico['Motors'][MotorName]['MBASE']
+ BaseCode = dico['Motors'][MotorName]['BASECODE']
+ Pinit = dico['Motors'][MotorName]['P']
+ Qinit = dico['Motors'][MotorName]['Q']
+ if BaseCode==2: #max is in MVA
+ PF = Pinit/((Pinit**2+Qinit**2)**0.5)
+ Pmax = PF*Mbase
+ else:
+ Pmax = Mbase
+ I_list = [_i]*9
+ F_list = [_f]*23
+ F_list[2]=x_copy[ite][i]*Pmax*0.7
+ psspy.induction_machine_chng(busNum,ID,I_list,F_list)
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'OPF motor load 2a'
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # Run the OPF
+ postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+ else:
+ #economic dispatch
+ EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ if np.any(np.array(EcdErrorCodes)!=0):
+ print "Error in economic dispatch."
+
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Mbase = dico['Motors'][MotorName]['MBASE']
+ BaseCode = dico['Motors'][MotorName]['BASECODE']
+ Pinit = dico['Motors'][MotorName]['P']
+ Qinit = dico['Motors'][MotorName]['Q']
+ if BaseCode==2: #max is in MVA
+ PF = Pinit/((Pinit**2+Qinit**2)**0.5)
+ Pmax = PF*Mbase
+ else:
+ Pmax = Mbase
+ I_list = [_i]*9
+ F_list = [_f]*23
+ F_list[2]=x_copy[ite][i]*Pmax*0.4
+ psspy.induction_machine_chng(busNum,ID,I_list,F_list)
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ if Debug:
+ print 'OPF motor load 2b'
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # Run the OPF
+ postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+ else:
+ #economic dispatch
+ EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ if np.any(np.array(EcdErrorCodes)!=0):
+ print "Error in economic dispatch."
+
+ for MotorName in MotorList:
+ busNum = dico['Motors'][MotorName]['NUMBER']
+ ID = dico['Motors'][MotorName]['ID']
+ Mbase = dico['Motors'][MotorName]['MBASE']
+ BaseCode = dico['Motors'][MotorName]['BASECODE']
+ Pinit = dico['Motors'][MotorName]['P']
+ Qinit = dico['Motors'][MotorName]['Q']
+ if BaseCode==2: #max is in MVA
+ PF = Pinit/((Pinit**2+Qinit**2)**0.5)
+ Pmax = PF*Mbase
+ else:
+ Pmax = Mbase
+ I_list = [_i]*9
+ F_list = [_f]*23
+ F_list[2]=x_copy[ite][i]*Pmax
+ psspy.induction_machine_chng(busNum,ID,I_list,F_list)
+ x2.append(x_copy[ite][i]) #store values sampled for logger function
+ # Generator Law : Change generation level
+ #if dico['Laws'][law]['ComponentType']=='Generator' and ('N_1' not in law) and ('out' not in law.lower()):
+ if dico['Laws'][law]['ComponentType']=='Generator' and ('Availability' not in dico['Laws'][law]['Type']):
+ GenList = dico['Laws'][law]['Generator']
+ for GenName in GenList:
+ busNum = dico['Generators'][GenName]['NUMBER']
+ ID = dico['Generators'][GenName]['ID']
+ Pmax = dico['Generators'][GenName]['PMAX']
+ Pmin = dico['Generators'][GenName]['PMIN']
+ if Pmin < 0 and abs(Pmin) > Pmax: #motor, not generator
+ psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
+ [x_copy[ite][i]*Pmin,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ else: #generator
+ psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
+ [x_copy[ite][i]*Pmax,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ x2.append(x_copy[ite][i]) #store values sampled for logger function
+ #Line or Transformer Availability Law: disconnect component if sample=0
+ elif dico['Laws'][law]['ComponentType']=='Line' or dico['Laws'][law]['ComponentType']=='Transformer':
+ compType = dico['Laws'][law]['ComponentType']
+ CompList = dico['Laws'][law][compType]
+
+ for Name in CompList:
+ from_bus = dico[compType + 's'][Name]['FROMNUMBER']
+ to_bus = dico[compType+ 's'][Name]['TONUMBER']
+
+ ID = dico[compType+ 's'][Name]['ID']
+ if compType=='Line':
+ psspy.branch_chng(from_bus,to_bus,ID,[x_copy[ite][i],_i,_i,_i,_i,_i],\
+ [_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ elif compType=='Transformer':
+ if dico[compType+ 's'][Name]['#WIND']==2:
+ i_args = [_i]*15
+ i_args[0]=int(x_copy[ite][i]) #this law's sampled availability status
+ f_args = [_f]*24
+ c_args = [_s]*2
+ psspy.two_winding_chng_4(from_bus,to_bus,ID,i_args,f_args,c_args)
+ elif dico[compType+ 's'][Name]['#WIND']==3:
+ three_bus = dico[compType + 's'][Name]['3NUMBER']
+ i_args = [_i]*12
+ i_args[7]=int(x_copy[ite][i]) #this law's sampled availability status
+ f_args = [_f]*17
+ c_args = [_s]*2
+ psspy.three_wnd_imped_chng_3(from_bus,to_bus,three_bus,ID,i_args,f_args,c_args)
+ x2.append(x_copy[ite][i]) #store values sampled for logger function
+
+ #Generator or Load or Motor Availability Law: disconnect component if sample = 0
+ #elif (dico['Laws'][law]['ComponentType']=='Generator' and ('N_1' in law or 'out' in law.lower())) or\
+ # (dico['Laws'][law]['ComponentType']=='Load' and ('N_1' in law or 'out' in law.lower())) or\
+ # (dico['Laws'][law]['ComponentType']=='Motor' and ('N_1' in law or 'out' in law.lower())):
+ elif (dico['Laws'][law]['ComponentType']=='Generator' and ('Availability' in dico['Laws'][law]['Type'])) or\
+ (dico['Laws'][law]['ComponentType']=='Load' and ('Availability' in dico['Laws'][law]['Type'])) or\
+ (dico['Laws'][law]['ComponentType']=='Motor' and ('Availability' in dico['Laws'][law]['Type'])):
+ compType = dico['Laws'][law]['ComponentType']
+ CompList = dico['Laws'][law][compType]
+
+ for Name in CompList:
+ busNum = dico[compType + 's'][Name]['NUMBER']
+ ID = dico[compType + 's'][Name]['ID']
+ if compType=='Generator':
+ psspy.machine_chng_2(busNum,ID,[x_copy[ite][i],_i,_i,_i,_i,_i],\
+ [_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ elif compType=='Load':
+ psspy.load_chng_4(busNum,ID,[x_copy[ite][i],_i,_i,_i,_i,_i],[_f,_f,_f,_f,_f,_f])
+
+ elif compType=='Motor':
+ psspy.induction_machine_chng(busNum,ID,[x_copy[ite][i],_i,_i,_i,_i,_i,_i,_i,_i],[_f]*23)
+ x2.append(x_copy[ite][i]) #store values sampled for logger function
+
+ #N-1 from file : systematic disconnection of a component
+ else: #law='N_1_fromFile'
+ if x_copy[ite][i]<0:
+ x2.append("")
+ pass
+ elif x_copy[ite][i] < len(continLines) : # The drawn element is a line
+
+ line_num=int(x_copy[ite][i])
+ line_name=continLines[int(line_num)]
+
+ from_bus=dico['Lines'][line_name]['FROMNUMBER']
+ to_bus=dico['Lines'][line_name]['TONUMBER']
+ br_id=dico['Lines'][line_name]['ID']
+ psspy.branch_chng(from_bus,to_bus,br_id,[0,_i,_i,_i,_i,_i],\
+ [ _f, _f, _f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ x2.append('Line '+str(from_bus)+'-'+str(to_bus)+'#'+str(br_id))
+
+ elif x_copy[ite][i] < (len(continLines)+len(continGroups)) :
+
+ group_num = int(x_copy[ite][i])-len(continLines)
+ group_name = continGroups[int(group_num)]
+ bus_num = dico['Generators'][group_name]['NUMBER']
+ bus_id = dico['Generators'][group_name]['ID']
+ psspy.machine_chng_2(int(bus_num),str(bus_id),[0,_i,_i,_i,_i,_i],\
+ [_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f]) # Disconnect component
+ psspy.opf_gendsp_indv(int(bus_num),str(bus_id),_i,0.0)
+ x2.append('Group '+str(bus_num)+'#'+str(bus_id))
+
+ elif x_copy[ite][i] < (len(continLines)+len(continGroups)+len(continTransfos)) :
+ transfo_num=int(x_copy[ite][i])-len(continLines)-len(continGroups)
+ transfo_name = continTransfos[int(transfo_num)]
+ from_bus= dico['Transformers'][transfo_name]['FROMNUMBER']
+ to_bus=dico['Transformers'][transfo_name]['TONUMBER']
+ ID=dico['Transformers'][transfo_name]['ID']
+
+ if dico['Transformers'][transfo_name]['#WIND']==2:
+ i_args = [_i]*15
+ i_args[0]=0
+ f_args = [_f]*24
+ c_args = [_s]*2
+ psspy.two_winding_chng_4(from_bus,to_bus,ID,i_args,f_args,c_args)
+ x2.append('Transfo '+str(from_bus)+'-'+str(to_bus)+'#'+str(ID))
+
+ elif dico['Transformers'][transfo_name]['#WIND']==3:
+ three_bus = dico['Transformers'][transfo_name]['3NUMBER']
+ i_args = [_i]*12
+ i_args[7]=0
+ f_args = [_f]*17
+ c_args = [_s]*2
+ psspy.three_wnd_imped_chng_3(from_bus,to_bus,three_bus,ID,i_args,f_args,c_args)
+ x2.append('Transfo '+str(from_bus)+'-'+str(to_bus)+'-'+str(three_bus)+'#'+str(ID))
+
+ elif x_copy[ite][i] < (len(continLines)+len(continGroups)+len(continTransfos)+len(continLoads)) :
+
+ load_num = int(x_copy[ite][i])-len(continLines)-len(continGroups)-len(continTransfos)
+ load_name = continLoads[int(load_num)]
+ bus_num = dico['Loads'][load_name]['NUMBER']
+ ID = dico['Loads'][load_name]['ID']
+ psspy.load_chng_4(int(bus_num),str(ID),[0,_i,_i,_i,_i,_i],[_f,_f,_f,_f,_f,_f]) # Disconnect component
+ x2.append('Load '+str(bus_num)+'#'+str(ID))
+
+ elif x_copy[ite][i] < (len(continLines)+len(continGroups)+len(continTransfos)+len(continLoads)+len(continMotors)) :
+ motor_num = int(x_copy[ite][i])-len(continLines)-len(continGroups)-len(continTransfos)-len(continLoads)
+ motor_name = continMotors[int(motor_num)]
+ bus_num = dico['Motors'][motor_name]['NUMBER']
+ ID = dico['Motors'][motor_name]['ID']
+ psspy.induction_machine_chng(int(bus_num),str(ID),[0,_i,_i,_i,_i,_i,_i,_i,_i],[_f]*23) # Disconnect component
+ x2.append('Motor '+str(bus_num)+'#'+str(ID))
+ else :
+ pass
+
+ psspy.save(doci) #Saving .sav modifications
+
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ #save OPF data
+ allbus=1
+ include = [1,1,1,1] #isolated buses, out of service branches, subsystem data, subsystem tie lines
+ out = 0 #out to file, not window
+ # if psspy.bsysisdef(0):
+ # sid = 0
+ # else: # Select subsytem with all buses
+ # sid = -1
+ sid = 3
+ RopFile = os.path.join(dico['doc_base'],"BaseCase.rop" )
+ AlreadyRop=os.path.isfile(RopFile)
+ if not AlreadyRop:
+ ierr = psspy.rwop(sid,allbus,include,out,RopFile) #write rop file
+
+ ok = True
+
+ if Debug:
+ print "Finished applying laws"
+ loadShed = []
+ fxshnt = []
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ xstrings = ['mvaact']
+ ierr, xdata1 = psspy.aloadcplx(-1, 1, xstrings)
+ istrings = ['number']
+ ierr, idata = psspy.aloadint(-1, 1, istrings)
+ cstrings = ['name']
+ ierr, cdata = psspy.aloadchar(-1, 1, cstrings)
+ bistrings = ['number']
+ ierr, bidata1 = psspy.afxshuntint(-1, 1, bistrings)
+ bxstrings = ['shuntnom']
+ ierr, bxdata1 = psspy.afxshuntcplx(-1, 1, bxstrings)
+ bcstrings = ['id']
+ ierr, bcdata1 = psspy.afxshuntchar(-1, 1, bcstrings)
+ #Unit commitment pass is only needed for OPF (economic dispatch itself turns generators on and off)
+ ##=========================================================================#
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ # First OPF to disconnect all generators at P=0
+ if dico['UnitCommitment']:
+ #increase load by reserve level so that after unit commitment there are enough groups to provide reserve
+ GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(RopFile)
+ ReserveFound=False
+ TotalReserveLevel = 0
+ AllReserveActive = []
+ for num in range(1,16): #up to 15 potential reserves defined in OPF
+ keyname = 'SpinningReserveID_'+str(int(num))
+ if PSSEParams.has_key(keyname):
+ ReserveID = PSSEParams[keyname]
+ for PRD in PeriodReserveData:
+ if PRD[0]==ReserveID:
+ ReserveFound=True
+ ReserveActive = PRD[3]
+ ReserveLevel = PRD[1]
+ AllReserveActive.append(ReserveActive)
+ TotalReserveLevel += ReserveActive*ReserveLevel
+ #print('Total Reserve = ', str(TotalReserveLevel))
+ if ReserveFound and any(AllReserveActive):
+ outputs = read_sav(doci)
+ loads = outputs[4]
+ total_load = 0
+ for load in loads:
+ total_load += load[1]
+
+ x_with_reserve = (total_load + TotalReserveLevel)/total_load
+ x_remove_reserve = 1.0/x_with_reserve
+ for load in loads:
+ busNum = load[0]
+ ID = load[5]
+ P = load[1]
+ Q = load[2]
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_with_reserve*P,x_with_reserve*Q,_f,_f,_f,_f])
+
+ #set Pmin so that the units needed to supply the reserve are not disconnected
+ if ReserveCorrection: #ReserveCorrection is expected as a module-level flag
+ NoDisconnectionAllowedTotal = []
+ for res in PeriodReserveData:
+ ResNum = res[0]
+ ResLevel = res[1]
+ ResPeriod = res[2]
+ InService = res[3]
+ if InService == 0:
+ continue
+ ParticipatingUnits = res[4]
+ ParticipatingUnitsFull = []
+ NoDisconnectionAllowed = []
+ for unit in ParticipatingUnits:
+ busNum = unit[0]
+ ID = unit[1]
+
+ for gen in GenReserveData:
+ busNum2 = gen[0]
+ ID2 = gen[1]
+ if busNum==busNum2 and ID == ID2:
+ ramp =gen[2]
+ #Pmax = gen[3]
+ break
+
+ for gen in GenDispatchData:
+ busNum3 = gen[0]
+ ID3 = gen[1]
+ if busNum==busNum3 and ID == ID3:
+ dispatch = gen[2]
+ dispTable = gen[3]
+ break
+
+ for dTable in DispTableData:
+ dispTable2 = dTable[0]
+ if dispTable == dispTable2:
+ PmaxTable = dTable[1]
+ Pmax = PmaxTable #take Pmax from dispatch table to avoid errors
+ PminTable = dTable[2]
+ FuelCostScaleCoef = dTable[3]
+ CurveType = dTable[4] #2 = piecewise linear
+ Status = dTable[5]
+ CostTable = dTable[6]
+ break
+
+ for table in LinCostTables:
+ CostTable2 = table[0]
+ if CostTable2==CostTable:
+ numpoints = table[1]
+ points = table[2]
+ break
+
+ MaxContribution = min(ResPeriod * ramp, Pmax)
+
+ for i,[x_,y_] in enumerate(points):
+ if x_ > Pmax:
+ x1 = x_
+ y1 = y_
+ x0 = points[i-1][0]
+ y0 = points[i-1][1]
+ break
+ y_i = (y1 - y0)*Pmax/(x1-x0)
+
+ if Pmax > 0:
+ CostCoef = y_i / Pmax
+ else:
+ #pdb.set_trace()
+ CostCoef = 0
+
+ ParticipatingUnitsFull.append([busNum, ID, Pmax, dispTable, MaxContribution, CostCoef])
+
+ ParticipatingUnitsFull.sort(key=lambda d: d[-1], reverse=False)
+ ReserveCapability = 0
+
+ for unit in ParticipatingUnitsFull:
+ MaxContribution = unit[4]
+ if ReserveCapability >= ResLevel:
+ break
+ else:
+ ReserveCapability += MaxContribution
+ dispTable = unit[3]
+ Pmax = unit[2]
+ busNum = unit[0]
+ ID = unit[1]
+ NoDisconnectionAllowed.append([busNum, ID])
+ Pmin = (DisconnectThreshhold*1.1)*Pmax
+ psspy.opf_apdsp_tbl(dispTable,[_i,_i,_i],[_f, Pmin,_f])
+
+ for grp in NoDisconnectionAllowed:
+ if grp not in NoDisconnectionAllowedTotal:
+ NoDisconnectionAllowedTotal.append(grp)
+
+ else:
+ pass
+
+ #psspy.minimize_load_adjustments(int(dico['PSSEParams']['LOADSHEDDING_COST'])) #now apply load shedding
+ #save new load levels to be able to initialize after opf run
+ psspy.save(doci) #Saving .sav modifications
+ all_inputs_base=read_sav(doci)
+ loads_base=all_inputs_base[4]
+ all_inputs_init_i =[]
+ for h, inputs in enumerate(all_inputs_init):
+ if h != 4:
+ all_inputs_init_i.append(inputs)
+ else:
+ all_inputs_init_i.append(loads_base)
+
+
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ # First OPF to disconnect all generators at P=0
+ if dico['UnitCommitment']:
+
+ if Debug:
+ print "principal OPF before unit commitment"
+
+ loadShed = []
+ fxshnt = []
+
+ indexLS = []
+ indexFS = []
+
+ indicLS = 0
+ indicFS = 0
+
+ xstrings = ['mvaact']
+ ierr, xdata1 = psspy.aloadcplx(-1, 1, xstrings)
+ istrings = ['number']
+ ierr, idata = psspy.aloadint(-1, 1, istrings)
+ cstrings = ['name']
+ ierr, cdata = psspy.aloadchar(-1, 1, cstrings)
+
+ bistrings = ['number']
+ ierr, bidata1 = psspy.afxshuntint(-1, 1, bistrings)
+ bxstrings = ['shuntnom']
+ ierr, bxdata1 = psspy.afxshuntcplx(-1, 1, bxstrings)
+ bcstrings = ['id']
+ ierr, bcdata1 = psspy.afxshuntchar(-1, 1, bcstrings)
+
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # run OPF
+
+ ok = False
+ flagLS = 0
+ flagFS = 0
+
+ # solved() => check if last solution attempt reached tolerance
+ # 0 = met convergence tolerance
+ # 1 = iteration limit exceeded
+ # 2 = blown up
+ # 3 = terminated by non-divergent option
+ # 4 = terminated by console input
+ # 5 = singular jacobian matrix or voltage of 0.0 detected
+ # 6 = inertial power flow dispatch error (INLF)
+ # 7 = OPF solution met convergence tolerance (NOPF)
+ # 8 does not exist ?
+ # 9 = solution not attempted
+
+ if psspy.solved() == 7 or psspy.solved()==0:
+ pass
+ else: #run OPF in loop to attempt convergence
+ postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ MAX_OPF = 5 # maximum number of OPF runs to try to reach convergence
+ for nbeOPF in range(0, MAX_OPF):
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # run OPF
+ if psspy.solved()==7 or psspy.solved()==0:
+ break
+ else:
+ postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+
+ #treat status of OPF
+ if psspy.solved() == 7 or psspy.solved()==0:
+ ok = True
+ elif psspy.solved() == 2:
+ print "OPF diverged. (before unit commitment)"
+ elif psspy.solved() == 3:
+ print "Terminated by non-divergent option. (before unit commitment)"
+ elif psspy.solved() == 4:
+ print "Terminated by console input. (before unit commitment)"
+ elif psspy.solved() == 5:
+ print "Singular jacobian matrix or voltage of 0.0 detected. (before unit commitment)"
+ elif psspy.solved() == 6:
+ print "Inertial power flow dispatch error (INLF). (before unit commitment)"
+ elif psspy.solved() == 8:
+ print "Solution does not exist. (before unit commitment)"
+ elif psspy.solved() == 9:
+ print "Solution not attempted. (before unit commitment)"
+ elif psspy.solved() == 1: #if iteration limit exceeded, try load flow
+ print "Iteration limit exceeded (before unit commitment), trying load flow."
+ # Newton-Raphson power flow calculation. Params:
+ # tap adjustment flag (0 = disable / 1 = enable stepping / 2 = enable direct)
+ # area interchange adjustment (0 = disable)
+ # phase shift adjustment (0 = disable)
+ # dc tap adjustment (1 = enable)
+ # switched shunt adjustment (1 = enable)
+ # flat start (0 = default / disabled, 1 = enabled), disabled because we are not starting from a flat initial state
+ # var limit (default = 99, -1 = ignore limit, 0 = apply var limit immediately)
+ # non-divergent solution (0 = disable)
+ psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i])
+ if psspy.solved() == 0:
+ ok=True
+ elif psspy.solved() == 2:
+ print "Load flow diverged. (before unit commitment)"
+ if ok:
+ # Returns an "array of complex values for subsystem loads"
+ ierr, xdata2 = psspy.aloadcplx(-1, 1, xstrings) # retrieve load MVA # returns each subsystem load as a complex value (P+jQ)
+
+ # aFxShuntInt: return an array of integer values for subsystem fixed shunts
+ ierr, bidata2 = psspy.afxshuntint(-1, 1, bistrings)
+
+ # aFxShuntCplx: return an array of complex values for subsystem fixed shunts
+ ierr, bxdata2 = psspy.afxshuntcplx(-1, 1, bxstrings) # retrieve bus shunt MVar
+
+ #Fixed shunt strings: return array of ids
+ ierr, bcdata2 = psspy.afxshuntchar(-1, 1, bcstrings)
+
+ # Extraction of the load shedding quantities
+ for i in range(len(xdata2[0])):
+ if np.real(xdata1)[0][i] != np.real(xdata2)[0][i]: # np.real returns the real part of the elements in the given array
+ indexLS.append(i)
+ flagLS = 1 # raise load shedding flag
+ try: # if / else would be better here ?
+ flagLS
+ except:
+ flagLS = 0
+ else:
+ loadShed.append([position]) # Position seems to correspond to the number of the case we are treating
+ loadShed[0].extend(['' for i in range(len(indexLS)-1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
+ loadShed.append([idata[0][i] for i in indexLS])
+ loadShed.append([cdata[0][i] for i in indexLS])
+ loadShed.append([np.real(xdata1)[0][i] - np.real(xdata2)[0][i] for i in indexLS])
+ loadShed.append([np.real(xdata2)[0][i] for i in indexLS])
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads
+ loadShed = zip(*loadShed) # transpose the matrix
+
+ # extraction adj. fixed shunt quantities
+ if len(bidata1[0]) == len(bidata2[0]): # a first OPF may already have occurred...
+ # so we check first if both vectors have the same length
+
+ for i in range(len(bxdata2[0])):
+ if np.imag(bxdata1)[0][i] != np.imag(bxdata2)[0][i]: # search for differences
+ indexFS.append(i)
+ flagFS = 1 # raise adj. bus shunt flag
+ try:
+ flagFS
+ except:
+ flagFS = 0
+ else:
+ bxdata2[0] = [np.imag(bxdata2)[0][i] for i in indexFS] # fill output vector
+ bidata2[0] = [bidata1[0][i] for i in indexFS]
+ bcdata2[0] = [bcdata1[0][i] for i in indexFS]
+ g = -1
+ while (g <= len(bidata2)):
+ g += 1
+ try:
+ #if fabs(bxdata2[0][g]) < 1: # discard value in ]-1,1[
+ if fabs(bxdata2[0][g]) < 0.001: # discard values in ]-0.001,0.001[
+ # pdb.set_trace()
+ bxdata2[0].pop(g)
+ bidata2[0].pop(g)
+ bcdata2[0].pop(g)
+ g -= 1
+ except: pass
+ if bxdata2[0] != []: # Get all fixed shunt buses
+ fxshnt.append([position])
+ fxshnt[0].extend(['' for i in range(len(bxdata2[0]) - 1)]) # Same here => maybe two lists or a dict would be a better choice
+ fxshnt.append(bidata2[0])
+ fxshnt.append(bxdata2[0])
+ indicFS = sum(fxshnt[2])
+ fxshnt = zip(*fxshnt) # transpose the matrix
+ flagFS = 1
+ else:
+ flagFS = 0
+
+ else: # if not same length, bus data corresponding to the adjustable bus shunt have been added to the vector
+ for i in range(len(bidata1[0])): # remove bus data of bus which are not added after the opf
+ try:
+ bxdata2[0].pop(bxdata2[0].index(bxdata1[0][i]))
+ bidata2[0].pop(bidata2[0].index(bidata1[0][i]))
+ bcdata2[0].pop(bcdata2[0].index(bcdata1[0][i]))
+ except:
+ pass
+ g = -1
+ bx = list(np.imag(bxdata2[0])) # retrieve Mvar
+ while g <= len(bidata2):
+ g += 1
+ try:
+ if fabs(bx[g]) < 1: # discard value in ]-1,1[
+ bx.pop(g)
+ bidata2[0].pop(g)
+ g -= 1
+ except: pass
+ if bx != []:
+ fxshnt.append([position])
+ fxshnt[0].extend(['' for i in range(len(bidata2[0]) - 1)])
+ fxshnt.append(bidata2[0])
+ fxshnt.append(bx)
+ indicFS = sum(fxshnt[2])
+ fxshnt = zip(*fxshnt)
+ flagFS = 1
+ else:
+ flagFS = 0
+
+
+ if PSSEParams['SAVE_CASE_BEFORE_UNIT_COMMITMENT']:
+ psspy.save(doci_beforeUC)
+ all_inputs = read_sav(doci_beforeUC)
+ psspy.save(doci)
+ all_inputs = read_sav(doci)
+
+ buses = all_inputs[0]
+ lines = all_inputs[1]
+ transf = all_inputs[2]
+ plants = all_inputs[3]
+ loads = all_inputs[4]
+ shunt = all_inputs[5]
+ motors = all_inputs[6]
+ transf3 = all_inputs[7]
+ swshunt = all_inputs[8]
+
+
+ gen_UC_list = []
+ for item in plants:
+ bus = item[0]
+ status = item[1]
+ _id = item[2]
+ pgen = item[3]
+ qgen = item[4]
+ pmax = item[6]
+ name = item[7]
+ machine_type = item[11]
+
+ #if it is a conventional generating unit as specified in the Machines tab of PSSE
+ if machine_type == 0:
+ if abs(pgen) <= pmax*DisconnectThreshhold:
+ if status==1:
+ #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
+ # disconnect the plant
+ pgen=0
+ qgen=0
+ status = 0
+ psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ gen_UC_list.append((bus,_id))
+ elif machine_type==1: #renewable generator fixed Q limits
+ if abs(pgen) <= pmax*0.2 and DEWA_PV_Qlimits: #change q limits if P renewable is < 20% Pmax (DEWA grid code)
+ if status==1:
+ qmin = -0.04*pmax
+ qmax = 0.04*pmax
+ qgen=min(qmax,qgen)
+ qgen=max(qmin,qgen)
+ psspy.machine_chng_2(bus, _id, [_i,_i,_i,_i,_i,_i],[_f,qgen,qmax,qmin,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
+ if status==1:
+ #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
+ # disconnect the plant
+ pgen=0
+ qgen=0
+ status = 0
+ psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ gen_UC_list.append((bus,_id))
+ elif machine_type==2: #renewable generator with cos phi control
+ if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
+ if status==1:
+ #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
+ # disconnect the plant
+ pgen=0
+ qgen=0
+ status = 0
+ psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ gen_UC_list.append((bus,_id))
+ elif machine_type==3: #renewable generator with fixed Q based on cos phi control
+ if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
+ if status==1:
+ #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
+ # disconnect the plant
+ pgen=0
+ qgen=0
+ status = 0
+ psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ gen_UC_list.append((bus,_id))
+ elif machine_type==4: #infeed machine that's still considered renewable
+ if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
+ if status==1:
+ #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
+ # disconnect the plant
+ pgen=0
+ qgen=0
+ status = 0
+ psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
+ gen_UC_list.append((bus,_id))
+ # 3. Build the output vectors
+ sizeY4 = len(shunt)
+ y_before = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2*sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
+ z_before = [0]*13 # 13 scalar indicators for the pre-unit-commitment case (filled below)
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac_before = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range (sizeY2) :
+ if lines [i][rate_mat_index]>100 :
+ z_before[0]+=1 # Number of lines above 100% of their limits
+ for i in range (sizeY5) :
+ if transf [i][rate_mat_index]>100 :
+ z_before[1]+=1 # Number of transformers above 100% of their limits
+ for i in range (sizeY7) :
+ if transf3 [i][rate_mat_index_3w]>100 :
+ z_before[1]+=1 # Number of transformers above 100% of their limits (each winding of a 3 winding counted)
+
+ for i in range (sizeY1):
+ if buses[i][2]>buses[i][5] :
+ z_before[2]+=1
+ if buses[i][2]<buses[i][4] :
+ z_before[2]+=1 # Number of buses outside of their voltage limits
+ for i in range (sizeY0) :
+ z_before[3]+=float(plants[i][3]) # Total active production
+ for i in range (sizeY3) :
+ z_before[4]+=float(loads[i][1]) # Total active consumption
+ for i in range (sizeY6):
+ z_before[4]+=float(motors[i][1]) # Add motors to total active consumption
+ z_before[5]=(z_before[3]-z_before[4])/z_before[3]*100 # Active power losses (% of total production)
+ for i in range (sizeY2) :
+ if lines [i][rate_mat_index]>z_before[6] :
+ z_before[6]=lines[i][rate_mat_index] # Max flow in lines
+ for i in range (sizeY5) :
+ if transf [i][rate_mat_index]>z_before[7] :
+ z_before[7]=transf[i][rate_mat_index] # Max flow in transformers
+ for i in range (sizeY7) :
+ if transf3 [i][rate_mat_index_3w]>z_before[7] :
+ z_before[7]=transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+
+ for i in range (sizeY2) :
+ if lines [i][rate_mat_index]>90 :
+ z_before[8]+=1
+ z_before[8]=z_before[8]-z_before[0] # Number of lines between 90% and 100% of their limits
+ for i in range (sizeY5) :
+ if transf [i][rate_mat_index]>90 :
+ z_before[9]+=1
+ for i in range (sizeY7) :
+ if transf3 [i][rate_mat_index_3w]>90 :
+ z_before[9]+=1
+
+ z_before[9]=z_before[9]-z_before[1] # Number of transformers between 90% and 100% of their limits
+
+ z_before[10]=indicFS
+
+ z_before[11]=indicLS
+
+ z_before[12] = str(gen_UC_list)
+
+ # Creates the output vectors
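+ # Layout of y_before (offsets follow the order of the loops below):
+ # [Pmach(sizeY0) | Qmach(sizeY0) | Vbus(sizeY1) | Iline/Pline/Qline(3*sizeY2) |
+ # Itrans/Ptrans/Qtrans(3*sizeY5) | Itrans3/Ptrans3/Qtrans3(3*sizeY7) |
+ # Pload(sizeY3) | Pmotor/Qmotor(2*sizeY6) | Qshunt(sizeY4) | Qswshunt(sizeY8)]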
+ for Pmach in range (sizeY0):
+ y_before[Pmach]=float(plants[Pmach][3])
+ Ymac_before[Pmach]=float(plants[Pmach][3])
+ for Qmach in range (sizeY0):
+ y_before[Qmach+sizeY0]=float(plants[Qmach][4])
+ for Vbus in range (sizeY1):
+ y_before[Vbus+2*sizeY0]=float(buses[Vbus][2])
+ for Iline in range (sizeY2):
+ y_before[Iline+2*sizeY0+sizeY1]=float(lines[Iline][rate_mat_index])
+ for Pline in range (sizeY2):
+ y_before[Pline+2*sizeY0+sizeY1+sizeY2]=float(lines[Pline][6])
+ for Qline in range (sizeY2):
+ y_before[Qline+2*sizeY0+sizeY1+2*sizeY2]=float(lines[Qline][7])
+ for Itrans in range (sizeY5):
+ y_before[Itrans+2*sizeY0+sizeY1+3*sizeY2]=float(transf[Itrans][rate_mat_index])
+ for Ptrans in range (sizeY5):
+ y_before[Ptrans+2*sizeY0+sizeY1+3*sizeY2+sizeY5]=float(transf[Ptrans][6])
+ for Qtrans in range (sizeY5):
+ y_before[Qtrans+2*sizeY0+sizeY1+3*sizeY2+2*sizeY5]=float(transf[Qtrans][7])
+ for Itrans in range (sizeY7):
+ y_before[Itrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5]=float(transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range (sizeY7):
+ y_before[Ptrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+sizeY7]=float(transf3[Ptrans][8])
+ for Qtrans in range (sizeY7):
+ y_before[Qtrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+2*sizeY7]=float(transf3[Qtrans][9])
+ for Pload in range (sizeY3) :
+ y_before[Pload+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7]=float(loads[Pload][1])
+ for Pmotor in range (sizeY6) :
+ y_before[Pmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3]=float(motors[Pmotor][1])
+ for Qmotor in range (sizeY6) :
+ y_before[Qmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+sizeY6]=float(motors[Qmotor][2])
+ for Qshunt in range (sizeY4) :
+ y_before[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6]=float(shunt[Qshunt][2])
+ for Qshunt in range (sizeY8) :
+ y_before[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6+sizeY4]=float(swshunt[Qshunt][4])
+ nz = len(z_before)
+
+ else :
+ print 'NON CONVERGENCE BEFORE UNIT COMMITMENT CASE '+str(position)+' CORE '+str(num_pac)
+ if TStest==1:
+ MyLogger(x2, y_before, z_before, dico['logCSVfilename_UC'][num_pac], timeVect[ite])
+ else:
+ MyLogger(x2, y_before, z_before, dico['logCSVfilename_UC'][num_pac], position)
+
+ #re-initialize OPF for post-unit commitment
+ postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ all_inputs = read_sav(doci)
+ loads = all_inputs[4]
+
+ #return load to original level post spinning reserve correction for unit commitment
+ for num in range(1,16):
+ keyname = 'SpinningReserveID_' + str(int(num))
+ if PSSEParams.has_key(keyname):
+ ReserveID = PSSEParams[keyname]
+ ReserveFound=False
+ AllReserveActive = []
+ for PRD in PeriodReserveData:
+ if PRD[0]==ReserveID:
+ ReserveFound=True
+ ReserveActive = PRD[3]
+ AllReserveActive.append(ReserveActive)
+ if ReserveFound and any(AllReserveActive):
+ for load in loads:
+ busNum = load[0]
+ ID = load[5]
+ P = load[1]
+ Q = load[2]
+ psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_remove_reserve*P,x_remove_reserve*Q,_f,_f,_f,_f])
+ psspy.save(doci)
+ else:
+ break
+
+ #store loadshedding and added MVAR values for before UC
+ loadShed_beforeUC = loadShed
+ fxshnt_beforeUC = fxshnt
+ indexLS_beforeUC = indexLS
+ indexFS_beforeUC = indexFS
+
+ indicLS_beforeUC = indicLS
+ indicFS_beforeUC = indicFS
+
+
+
+ # Unit commitment pass only valid for OPF (economic dispatch turns on and off generators)
+ ##=========================================================================#
+ # NB: we DO make two passes: one pass, then we look at the units with P==0 and Q!=0,
+ # disconnect them and rerun the OPF.
+ # And we log: the % of cases where this happens,
+ # how much reactive power is needed on average,
+ # and which machines are doing this
+ loadShed = []
+ fxshnt = []
+ indexLS = []
+ indexFS = []
+ indicLS = 0
+ indicFS = 0
+ flagLS = 0
+ flagFS = 0
+ ok = False
+
+ xstrings = ['mvaact']
+ ierr, xdata1 = psspy.aloadcplx(-1, 1, xstrings)
+ istrings = ['number']
+ ierr, idata = psspy.aloadint(-1, 1, istrings)
+ cstrings = ['name']
+ ierr, cdata = psspy.aloadchar(-1, 1, cstrings)
+ bistrings = ['number']
+ ierr, bidata1 = psspy.afxshuntint(-1, 1, bistrings)
+ bxstrings = ['shuntnom']
+ ierr, bxdata1 = psspy.afxshuntcplx(-1, 1, bxstrings)
+ bcstrings = ['id']
+ ierr, bcdata1 = psspy.afxshuntchar(-1, 1, bcstrings)
+
+ if Debug:
+ print "got to principal OPF/LF"
+
+ #Solve OPF
+ if PSSEParams['ALGORITHM']=='Optimum Power Flow':
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # run OPF
+ flagLS = 0
+ flagFS = 0
+ ok = False
+ #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
+
+ # solved() => check if last solution attempt reached tolerance
+ # 0 = met convergence tolerance
+ # 1 = iteration limit exceeded
+ # 2 = blown up
+ # 3 = terminated by non-divergent option
+ # 4 = terminated by console input
+ # 5 = singular jacobian matrix or voltage of 0.0 detected
+ # 6 = inertial power flow dispatch error (INLF)
+ # 7 = OPF solution met convergence tolerance (NOPF)
+ # 8 does not exist ?
+ # 9 = solution not attempted
+ if psspy.solved() == 7 or psspy.solved()==0:
+ pass
+ else: #run OPF in loop to attempt convergence
+ postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+ MAX_OPF = 5 # maximum number of OPF runs to try to reach convergence
+ for nbeOPF in range(0, MAX_OPF):
+ psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
+ psspy.set_opf_report_subsystem(3,0)
+ psspy.nopf(0,1) # run OPF
+ if psspy.solved()==7 or psspy.solved()==0:
+ break
+ else:
+ postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
+
+ #treat OPF status code
+ if psspy.solved() == 7 or psspy.solved() == 0:
+ ok = True
+ elif psspy.solved() == 2:
+ print "OPF diverged."
+ elif psspy.solved()== 3:
+ print "Terminated by non-divergent option."
+ elif psspy.solved()== 4:
+ print "Terminated by console input."
+ elif psspy.solved()== 5:
+ print "Singular jacobian matrix or voltage of 0.0 detected."
+ elif psspy.solved()== 6:
+ print "Inertial power flow dispatch error (INLF)."
+ elif psspy.solved()== 8:
+ print "Solution does not exist."
+ elif psspy.solved()== 9:
+ print "Solution not attempted."
+ elif psspy.solved() == 1: #if iteration limit exceeded, try load flow
+ print "Iteration limit exceeded, trying load flow (CASE " + str(ite) + ")."
+ # Newton-Raphson power flow calculation. Params:
+ # tap adjustment flag (0 = disable / 1 = enable stepping / 2 = enable direct)
+ # area interchange adjustment (0 = disable)
+ # phase shift adjustment (0 = disable)
+ # dc tap adjustment (1 = enable)
+ # switched shunt adjustment (1 = enable)
+ # flat start (0 = default / disabled, 1 = enabled), disabled because we are not starting from a flat initial state
+ # var limit (default = 99, -1 = ignore limit, 0 = apply var limit immediately)
+ # non-divergent solution (0 = disable)
+ psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i])
+ if psspy.solved() == 0:
+ ok=True
+ elif psspy.solved() == 2:
+ print "Load flow diverged"
+ # else:
+ # #PlimitList = []
+ # #QlimitList = []
+ # if Debug:
+ # print "Got to principal economic dispatch"
+ # #economic dispatch
+ # EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
+ # #PlimitList.append(Plimit)
+ # #QlimitList.append(Qlimit)
+ # if Debug:
+ # print "Ran principal economic dispatch"
+ # if np.any(np.array(EcdErrorCodes)!=0):
+ # print "Error in economic dispatch."
+ # elif LFcode != 0:
+ # print "Load flow did not converge"
+ # else:
+ # ok = True
+ #
+ # if Plimit == True:
+ # print "Swing generator exceeds active power limits after economic dispatch."
+ # if Qlimit == True:
+ # print "Swing generator exceeds reactive power limits after economic dispatch."
+ if ok==True:
+ ierr, xdata2 = psspy.aloadcplx(-1, 1, xstrings) # retrieve load MVA # returns each subsystem load as a complex value (P+jQ)
+ ierr, bidata2 = psspy.afxshuntint(-1, 1, bistrings)
+ ierr, bxdata2 = psspy.afxshuntcplx(-1, 1, bxstrings) # retrieve bus shunt MVar
+ ierr, bcdata2 = psspy.afxshuntchar(-1, 1, bcstrings)
+ # Extraction of the load shedding quantities
+ for i in range(len(xdata2[0])):
+ if np.real(xdata1)[0][i] != np.real(xdata2)[0][i]: # np.real returns the real part of the elements in the given array
+ indexLS.append(i)
+ flagLS = 1 # raise flag loadshedding
+ try: # if / else would be better here ?
+ flagLS
+ except:
+ flagLS = 0
+ else:
+ loadShed.append([position]) # Position seems to correspond to the number of the case we are treating
+ loadShed[0].extend(['' for i in range(len(indexLS)-1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
+ loadShed.append([idata[0][i] for i in indexLS])
+ loadShed.append([cdata[0][i] for i in indexLS])
+ loadShed.append([np.real(xdata1)[0][i] - np.real(xdata2)[0][i] for i in indexLS]) #loadShed[3]
+ loadShed.append([np.real(xdata2)[0][i] for i in indexLS]) #loadShed[4]
+ indicLS = sum(loadShed[3]) # sum all Effective MW loads #sum(loadShed[3])
+ loadShed = zip(*loadShed) # transpose the matrix
+
+ # extraction adj. fixed shunt quantities
+ if len(bidata1[0]) == len(bidata2[0]): # a first OPF may already have occurred...
+ # so we check first if both vectors have the same length
+
+ for i in range(len(bxdata2[0])):
+ if np.imag(bxdata1)[0][i] != np.imag(bxdata2)[0][i]: # search for differences
+ indexFS.append(i)
+ flagFS = 1 # raise adj. bus shunt flag
+ try:
+ flagFS
+ except:
+ flagFS = 0
+ else:
+ bxdata2[0] = [np.imag(bxdata2)[0][i] for i in indexFS] # fill output vector
+ bidata2[0] = [bidata1[0][i] for i in indexFS]
+ bcdata2[0] = [bcdata1[0][i] for i in indexFS]
+ g = -1
+ while (g <= len(bidata2)):
+ g += 1
+ try:
+ #if fabs(bxdata2[0][g]) < 1: # discard value in ]-1,1[
+ if fabs(bxdata2[0][g]) < 0.001: # discard values in ]-0.001,0.001[
+ # pdb.set_trace()
+ bxdata2[0].pop(g)
+ bidata2[0].pop(g)
+ bcdata2[0].pop(g)
+ g -= 1
+ except: pass
+ if bxdata2[0] != []: # Get all fixed shunt buses
+ fxshnt.append([position])
+ fxshnt[0].extend(['' for i in range(len(bxdata2[0]) - 1)]) # Same here => maybe two lists or a dict would be a better choice
+ fxshnt.append(bidata2[0])
+ fxshnt.append(bxdata2[0])
+ indicFS = sum(fxshnt[2])
+ fxshnt = zip(*fxshnt) # transpose the matrix
+ flagFS = 1
+ else:
+ flagFS = 0
+
+ else: # if not same length, bus data corresponding to the adjusted bus shunt have been added to the vector
+ for i in range(len(bidata1[0])): # remove bus data of bus which are not added after the opf
+ try:
+ bxdata2[0].pop(bxdata2[0].index(bxdata1[0][i]))
+ bidata2[0].pop(bidata2[0].index(bidata1[0][i]))
+ bcdata2[0].pop(bcdata2[0].index(bcdata1[0][i]))
+ except:
+ pass
+ g = -1
+ bx = list(np.imag(bxdata2[0])) # retrieve Mvar
+ while g <= len(bidata2):
+ g += 1
+ try:
+ if fabs(bx[g]) < 1: # discard value in ]-1,1[
+ bx.pop(g)
+ bidata2[0].pop(g)
+ g -= 1
+ except: pass
+ if bx != []:
+ fxshnt.append([position])
+ fxshnt[0].extend(['' for i in range(len(bidata2[0]) - 1)])
+ fxshnt.append(bidata2[0])
+ fxshnt.append(bx)
+ indicFS = sum(fxshnt[2])
+ fxshnt = zip(*fxshnt)
+ flagFS = 1
+ else:
+ flagFS = 0
+
+
+ psspy.save(doci)
+ all_inputs=read_sav(doci)
+ buses = all_inputs[0]
+ lines = all_inputs[1]
+ transf = all_inputs[2]
+ plants = all_inputs[3]
+ loads = all_inputs[4]
+ shunt = all_inputs[5]
+ motors = all_inputs[6]
+ transf3=all_inputs[7]
+ swshunt = all_inputs[8]
+
+ #pdb.set_trace()
+
+ # 3. Build the output vectors
+ sizeY4 = len(shunt)
+ y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2*sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5+ 3 * sizeY7)
+ z = np.zeros(12+ 2*int(PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow')) # np.zeros returns a new array of the given shape and type filled with zeros
+ rate_mat_index = Irate_num + 2
+ rate_mat_index_3w = Irate_num + 4
+ Ymac = np.zeros(sizeY0)
+ if ok:
+ # Creates the quantities of interest
+ for i in range (sizeY2) :
+ if lines [i][rate_mat_index]>100 :
+ z[0]+=1 # Number of lines above 100% of their limits
+ for i in range (sizeY5) :
+ if transf [i][rate_mat_index]>100 :
+ z[1]+=1 # Number of transformers above 100% of their limits
+ for i in range (sizeY7) :
+ if transf3 [i][rate_mat_index_3w]>100 :
+ z[1]+=1 # Add number of 3w transformers above 100% of their limits
+ for i in range (sizeY1):
+ if buses[i][2]>buses[i][5] :
+ z[2]+=1
+ if buses[i][2]<buses[i][4] :
+ z[2]+=1 # Number of buses outside of their voltage limits
+ for i in range (sizeY0) :
+ z[3]+=float(plants[i][3]) # Total active production
+ for i in range (sizeY3) :
+ z[4]+=float(loads[i][1]) # Total active consumption
+ for i in range (sizeY6) :
+ z[4]+=float(motors[i][1]) # add total active consumption from motors
+ z[5]=(z[3]-z[4])/z[3]*100 # Active power losses (% of total production)
+ for i in range (sizeY2) :
+ if lines [i][rate_mat_index]>z[6] :
+ z[6]=lines[i][rate_mat_index] # Max flow in lines
+ for i in range (sizeY5) :
+ if transf [i][rate_mat_index]>z[7] :
+ z[7]=transf[i][rate_mat_index] # Max flow in transformers
+ for i in range (sizeY7) :
+ #pdb.set_trace()
+ if transf3 [i][rate_mat_index_3w]>z[7] :
+ z[7]=transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
+ for i in range (sizeY2) :
+ if lines [i][rate_mat_index]>90 :
+ z[8]+=1
+ z[8]=z[8]-z[0] # Number of lines between 90% and 100% of their limits
+ for i in range (sizeY5) :
+ if transf [i][rate_mat_index]>90 :
+ z[9]+=1
+ for i in range (sizeY7) :
+ if transf3 [i][rate_mat_index_3w]>90 :
+ z[9]+=1
+ z[9]=z[9]-z[1] # Number of transformers between 90% and 100% of their limits
+
+ z[10]=indicFS
+
+ z[11]=indicLS
+
+ # if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
+ # z[12] = int(Plimit)
+ # z[13] = int(Qlimit)
+
+ # Creates the output vectors
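+ # y uses the same layout as y_before above:
+ # [Pmach | Qmach | Vbus | I/P/Qline | I/P/Qtrans | I/P/Qtrans3 | Pload | P/Qmotor | Qshunt | Qswshunt]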
+ for Pmach in range (sizeY0):
+ y[Pmach]=float(plants[Pmach][3])
+ Ymac[Pmach]=float(plants[Pmach][3])
+ for Qmach in range (sizeY0):
+ y[Qmach+sizeY0]=float(plants[Qmach][4])
+ for Vbus in range (sizeY1):
+ y[Vbus+2*sizeY0]=float(buses[Vbus][2])
+ for Iline in range (sizeY2):
+ y[Iline+2*sizeY0+sizeY1]=float(lines[Iline][rate_mat_index])
+ for Pline in range (sizeY2):
+ y[Pline+2*sizeY0+sizeY1+sizeY2]=float(lines[Pline][6])
+ for Qline in range (sizeY2):
+ y[Qline+2*sizeY0+sizeY1+2*sizeY2]=float(lines[Qline][7])
+ for Itrans in range (sizeY5):
+ y[Itrans+2*sizeY0+sizeY1+3*sizeY2]=float(transf[Itrans][rate_mat_index])
+ for Ptrans in range (sizeY5):
+ y[Ptrans+2*sizeY0+sizeY1+3*sizeY2+sizeY5]=float(transf[Ptrans][6])
+ for Qtrans in range (sizeY5):
+ y[Qtrans+2*sizeY0+sizeY1+3*sizeY2+2*sizeY5]=float(transf[Qtrans][7])
+ for Itrans in range (sizeY7):
+ y[Itrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5]=float(transf3[Itrans][rate_mat_index_3w])
+ for Ptrans in range (sizeY7):
+ y[Ptrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+sizeY7]=float(transf3[Ptrans][8])
+ for Qtrans in range (sizeY7):
+ y[Qtrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+2*sizeY7]=float(transf3[Qtrans][9])
+ for Pload in range (sizeY3) :
+ y[Pload+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7]=float(loads[Pload][1])
+ for Pmotor in range (sizeY6) :
+ y[Pmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3]=float(motors[Pmotor][1])
+ for Qmotor in range (sizeY6) :
+ y[Qmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+sizeY6]=float(motors[Qmotor][2])
+ for Qshunt in range (sizeY4) :
+ y[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6]=float(shunt[Qshunt][2])
+ for Qshunt in range (sizeY8) :
+ y[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6+sizeY4]=float(swshunt[Qshunt][4])
+
+ nz = len(z)
+
+ else :
+ print ('NON CONVERGENCE CASE '+str(position)+' CORE '+str(num_pac))
+
+ if dico['UnitCommitment']:
+ Output_beforeUC.append(z_before)#append the output
+ Pmachine_beforeUC.append(Ymac_before)
+ LS_beforeUC.append(indicLS_beforeUC)
+ FS_beforeUC.append(indicFS_beforeUC)
+ LStable_beforeUC.extend(loadShed_beforeUC)
+ FStable_beforeUC.extend(fxshnt_beforeUC)
+
+ Output.append(z)#append the output
+ Pmachine.append(Ymac)
+ LS.append(indicLS)
+ FS.append(indicFS)
+ LStable.extend(loadShed)
+ FStable.extend(fxshnt)
+
+ if TStest==1:
+ MyLogger(x2,y,z,logCSVfilename[num_pac],timeVect[ite])
+ else:
+ MyLogger(x2,y,z,logCSVfilename[num_pac],position) #for each iteration write in the CSV
+
+## if dico['TStest']==1:
+## sys.stdout.close()
+ return inputSample, Output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
+
+## except Exception,e:
+## print e
+## a=[]
+## return a
+
+def create_dist(dico):
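+ '''
+ Build the OpenTURNS input distribution for the sampling: one marginal
+ per law listed in the correlation matrix (plus an optional discrete
+ N-1 marginal read from file, assumed uncorrelated with the other
+ laws), combined through a normal copula built from the user's
+ correlation matrix.
+ '''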
+
+ NumLaws = len(dico['Laws']) + int(dico['N_1_fromFile'])
+
+ #Create a correlation matrix as copulas
+ CorrMatrixNames = dico['CorrMatrix']['laws']
+ CorrMatrix = dico['CorrMatrix']['matrix']
+ corr=CorrelationMatrix(NumLaws)#Openturns
+
+ # Create a collection of the marginal distributions
+ collectionMarginals = DistributionCollection(NumLaws)#Openturns
+
+ distributionX = []
+ for i,key in enumerate(CorrMatrixNames):
+ data, [time_serie, time_serie_file] = getUserLaw(dico['Laws'][key])
+ distributionX.append( data )
+ collectionMarginals[i] = Distribution(data)
+
+ #add N_1 components entered as Files
+ if dico['N_1_fromFile']==True:
+ continTuples = []
+ for j in range(len(dico['continVal'])):
+ continTuples.append((dico['continVal'][j],dico['continProb'][j]))
+ data = getUserDefined(continTuples)
+ distributionX.append(data)
+ collectionMarginals[i+1] = Distribution(data)
+ CorrMatrixNames.append('N_1_fromFile')
+ CorrMatrixEx = np.hstack((CorrMatrix, np.zeros((NumLaws-1,1)))) #assume no correlation between N-1 and other laws
+ LastLine = np.hstack((np.zeros((1,NumLaws-1)),np.ones((1,1))))
+ CorrMatrixEx = np.vstack((CorrMatrixEx, LastLine))
+ CorrMatrix = CorrMatrixEx
+ (Nrows, Ncols) = np.shape(CorrMatrixEx)
+ else:
+ (Nrows, Ncols) = np.shape(CorrMatrix)
+ for i in range(Nrows):
+ for j in range(Ncols):
+ corr[i,j]=CorrMatrix[i,j]
+
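+ #the user-entered matrix is interpreted as a Spearman (rank) correlation and converted to the equivalent correlation parameter of the normal copula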
+ corr2= NormalCopula.GetCorrelationFromSpearmanCorrelation(corr)
+ copula=Copula(NormalCopula(corr2))
+ #copula=Copula(NormalCopula(corr))
+
+ # Create the input probability distribution, args are the distributions, the correlation laws
+ inputDistribution = ComposedDistribution(collectionMarginals, copula)
+
+ return inputDistribution
+
+def Calculation(dico,data1,msg):
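+ '''
+ Worker entry point: repeatedly draws Monte Carlo packages of size
+ dico['lenpac'] from the input distribution and runs PSSEFunct on each,
+ pushing per-package indicators to data1 (mean load shed / added Mvar if
+ flag2, else mean transit / voltage counts) until a 'stop' message is
+ received on msg. Appends summary tables to the log CSV files and
+ returns (output, inputSample, Pmachine).
+ '''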
+
+
+ os.chdir(dico['doc_base']) #to work in correct directory
+## sys.stdout=open('process num'+str(os.getpid())+'_package '+\
+## str(dico['num_pac'])+'.out','w')
+ #pdb.set_trace()
+ flag2=dico['flag2']
+ inputDistribution=create_dist(dico) #create new distribution
+ #initialization
+ LStable=[]
+ FStable=[]
+ output=[]
+ inputSample=[]
+ Pmachine=[]
+
+ LStable_beforeUC=[]
+ FStable_beforeUC=[]
+ output_beforeUC=[]
+ Pmachine_beforeUC=[]
+
+ outputSampleAll=NumericalSample(0,12 + 2*int(dico['PSSEParams']['ALGORITHM']=='Economic Dispatch and Power Flow'))
+
+ RandomGenerator.SetSeed(os.getpid())
+ Message=msg.get()
+ print(Message+'=======OK')
+
+
+ while(Message !='stop'):
+ myMCE = MonteCarloExperiment(inputDistribution,dico['lenpac']) #create new sample
+ inputSamp = myMCE.generate()
+
+ try:
+ Message=msg.get(block=False)
+ if Message=='stop': break
+ except:
+ pass
+ res=PSSEFunct(dico.copy(),inputSamp) #launch PSSEFunct (OPF)
+ #subprocess.Popen(['c:/python34/python.exe','PFfunction.py'])
+ dico['position']+=dico['lenpac']
+ # 0 1 2 3 4 5 6
+ #inputSample, Output, Pmachine, LS, FS, LStable, FStable,
+ # 7 8 9 10 11 12
+ #Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
+ for result in res[1]:
+ outputSampleAll.add(NumericalPoint(result)) #create a Numerical Sample variable
+
+ if (flag2):
+ LS=(np.mean(res[3])) #mean per package
+ FS=(np.mean(res[4])) #mean per package
+ z=[LS,FS]
+ data1.put(z)
+ sleep(1)
+
+ #if criteria on nbeTension and NbeTransit
+ else:
+ NbeTransit=(float(NumericalPoint(1,outputSampleAll.computeMean()[0])[0])) #mean per package
+ NbeTension=(float(NumericalPoint(1,outputSampleAll.computeMean()[1])[0]))
+ z=[NbeTransit,NbeTension]
+ data1.put(z)
+ sleep(1)
+
+ inputSample.extend(res[0])
+
+ LStable.extend(res[5])
+ FStable.extend(res[6])
+ output.extend(res[1])
+ Pmachine.extend(res[2])
+
+ LStable_beforeUC.extend(res[11])
+ FStable_beforeUC.extend(res[12])
+ output_beforeUC.extend(res[7])
+ Pmachine_beforeUC.extend(res[8])
+
+ if msg.empty():
+ Message = "empty"
+ else:
+ Message=msg.get(block=True,timeout=2)
+ print 'MSG is '+str(Message)+' time: '+str(strftime("%Hh%Mm%S", gmtime()))
+
+# sys.stdout.close()
+
+ ## #write summary tables for before UC
+ if dico['UnitCommitment']:
+ f=open(dico['logCSVfilename_UC'][dico['num_pac']],'a')
+ f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
+ f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
+ for i in range(max(len(LStable_beforeUC),len(FStable_beforeUC))):
+ try:
+ f.write('{0};;{1};{2};{3};{4}'.format(LStable_beforeUC[i][0],LStable_beforeUC[i][1]\
+ ,LStable_beforeUC[i][2],LStable_beforeUC[i][3],LStable_beforeUC[i][4]))
+ except:
+ f.write(';;;;;')
+
+ try:
+ f.write(';;;{0};;{1};{2} \n'.format(FStable_beforeUC[i][0],FStable_beforeUC[i][1],FStable_beforeUC[i][2]))
+ except:
+ f.write('\n')
+ f.write("\n\n")
+ f.close()
+
+ ## #write summary tables
+ f=open(dico['logCSVfilename'][dico['num_pac']],'a')
+ f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
+ f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
+ for i in range(max(len(LStable), len(FStable))):
+ try:
+ f.write('{0};;{1};{2};{3};{4}'.format(LStable[i][0],LStable[i][1]\
+ ,LStable[i][2],LStable[i][3],LStable[i][4]))
+ except:
+ f.write(';;;;;')
+ try:
+ f.write(';;;{0};;{1};{2} \n'.format(FStable[i][0],FStable[i][1],FStable[i][2]))
+ except:
+ f.write('\n')
+ f.write("\n\n")
+ f.close()
+
+
+
+ return output, inputSample,Pmachine
+
+class NonBlockingStreamReader(): #class object to read in a stdout process
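+ '''
+ Wrap a blocking stream (typically a child process' stdout) with a
+ background reader thread and a queue, so the caller can poll for
+ output with a timeout instead of blocking.
+ '''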
+
+ def __init__(self, stream):
+ '''
+ stream: the stream to read from.
+ Usually a process' stdout or stderr.
+ '''
+
+ self._s = stream
+ self._q = Queue()
+
+ def _populateQueue(stream, queue):
+ '''
+ Collect lines from 'stream' and put them in 'queue'.
+ '''
+
+ while True:
+ line = stream.readline() #read line by line so output reaches the queue as soon as it is produced
+ if line:
+ queue.put(line)
+ else:
+ break #the stream was closed (EOF): stop the reader thread
+
+ self._t = Thread(target = _populateQueue,
+ args = (self._s, self._q))
+ self._t.daemon = True
+ self._t.start() #start collecting lines from the stream
+
+ def readline(self, timeout = None):
+ try:
+ return self._q.get(block = timeout is not None,
+ timeout = timeout)
+ except Empty:
+ return None
+
+
+def Convergence(data2,msg,OPF,nb_fix,cmd_Path):
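+ '''
+ Supervisor loop: launches the user-command script (cmd_Path) as a
+ subprocess (any output from it interrupts the simulation), feeds 'ok'
+ tokens to the worker queue, collects per-package indicators from data2,
+ and stops once the standard deviations of the cumulative means fall
+ below fixed thresholds (or after nb_fix packages when nb_fix != 0).
+ Returns the two indicator histories.
+ '''
+ import subprocess #local import: subprocess is not among the module-level imports of this script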
+
+ LS=[]
+ FS=[]
+ MoyTension=[]
+ MoyTransit=[]
+ MoyCumuLS=[]
+ MoyCumuFS=[]
+ NbeTension=[]
+ NbeTransit=[]
+ Ind1=[]
+ Ind2=[]
+ ind=1
+ t=0
+ p=subprocess.Popen(['python',cmd_Path],stdout=subprocess.PIPE) #launch subprocess
+ nbsr=NonBlockingStreamReader(p.stdout) #monitor subprocess stdout
+
+ print 'Calculating convergence criteria\n'
+ while(ind):
+
+ output=nbsr.readline(0.1)
+ if output:
+ print 'Interrupting simulation.....'
+ break
+
+ for i in range(multiprocessing.cpu_count()*3): #put 'ok' in the queue three times the number of cores
+ msg.put('ok')
+
+ debut=data2.get(block=True)
+ t+=1
+ print 'Package '+str(t)
+
+ if (OPF): #if criteria on Load shed and mvar
+ LS.append(debut[0])
+ FS.append(debut[1])
+
+ MoyCumuLS.append(np.mean(LS[0:t]))
+ MoyCumuFS.append(np.mean(FS[0:t]))
+
+ if t==1:
+ indice1=1
+ indice2=1
+ else:
+ indice1=np.std(MoyCumuLS) #calculate stop criterion for load shedding
+ indice2=np.std(MoyCumuFS) #calculate stop criterion for mvar
+
+ Ind1.append(indice1)
+ Ind2.append(indice2)
+ print 'indicator Load Shedding= '+str(indice1)+';'+' indicator Added Mvar= '+str(indice2)+'\n'
+
+ if (indice1<0.2) and (indice2<0.015) and nb_fix==0:
+ ind=0
+ break
+ elif len(Ind1)==nb_fix:
+ break
+ else:
+ NbeTransit.append(debut[0])
+ NbeTension.append(debut[1])
+
+ MoyTension.append(np.mean(NbeTension[0:len(NbeTension)]))
+ MoyTransit.append(np.mean(NbeTransit[0:len(NbeTransit)]))
+
+ if t==1:
+ indice1=1
+ indice2=1
+ else:
+ indice1=np.std(MoyTension) #calculate stop criterion for voltage
+ indice2=np.std(MoyTransit) #calculate stop criterion for transit (flow)
+
+ Ind1.append(indice1)
+ Ind2.append(indice2)
+ print 'indicator Nbe Tension= '+str(indice1)+' indicator Transit= '+str(indice2)+'\n'
+
+ if (indice1<0.01) and (indice2<0.01) and nb_fix==0:
+ ind=0
+ break
+ elif len(Ind1)==nb_fix:
+ break
+
+ while msg.empty()==False : #flush the queue
+ msg.get()
+ # print(msg.qsize())
+ for i in range(100): #put a lot of 'stop's in the queue to make all processes stop
+ msg.put_nowait('stop')
+ # print(msg.qsize())
+
+ p.terminate()
+
+ return Ind1,Ind2
--- /dev/null
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Jun 03 15:31:42 2013
+
+@author: B31272
+
+Support functions
+"""
+import os,sys,random,string,time,pickle
+import PSENconfig
+sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])
+os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'
+#sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])
+#os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PF_path'] + ";"+ os.environ['PATH']
+import powerfactory
+
+
+import numpy as np
+from math import *
+from decimal import *
+from openturns import *
+from time import sleep, strftime, gmtime
+import multiprocessing
+from threading import Thread
+try:
+    from queue import Queue, Empty #Python 3
+except ImportError:
+    from Queue import Queue, Empty #Python 2
+import pdb
+#===============================================================================
+# DEFINITION OF THE SUPPORT FUNCTIONS
+#===============================================================================
+
+
+#to recover a list from its string representation: "['wind 1', 'wind 2', 'charge']" --> ['wind 1', 'wind 2', 'charge']
+def RemoveListfromString(List):
+ List = List.replace("]","")
+ List = List.replace("[","")
+ List = List.replace(")","")
+ List = List.replace("(","")
+ List = List.replace("'","")
+ List = List.replace('"',"")
+ List = List.replace(" ","")
+ List = List.split(",")
+ return List
+
+def RemoveTuplesfromString(TList):
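+ #e.g. "[('wind1', 0.3), ('wind2', 0.7)]" --> [['wind1', 0.3], ['wind2', 0.7]]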
+ TL = RemoveListfromString(TList)
+ nTL = []
+ for i in range(len(TL)//2): #floor division so this also works under Python 3
+ nTL.append([TL[2*i],float(TL[2*i+1])])
+ return nTL
+
+def applyTF(x_in, TF):
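+ '''
+ Apply a transfer function TF, given as a list of (x, power) pairs, to
+ the input value x_in: the curve is linearly interpolated at the
+ precision of the points entered, and the power at x_in is returned
+ normalized by the maximum power of the curve.
+ '''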
+
+ X = []
+ P = []
+ for (x,p) in TF:
+ X.append(x)
+ P.append(p)
+
+
+ Pmax=max(P)
+ precision = 0.001
+ #calculate precision of values entered
+ for i in range(len(X)):
+ d1 = Decimal(str(X[i]))
+ d2 = Decimal(str(P[i]))
+ d1expo = d1.as_tuple().exponent
+ d2expo = d2.as_tuple().exponent
+ expo = np.minimum(d1expo,d2expo)
+ precision = min(10**(expo-1),precision)
+
+
+ #change to array type for consistency
+ X = np.array(X)
+ P = np.array(P)
+
+ #interpolate between values so that precise wind speed data doesn't output heavily discretized power levels
+ from scipy import interpolate
+ finterp = interpolate.interp1d(X,P, kind='linear')
+ Xmin = min(X)
+ Xmax = max(X)
+ Xnew = np.arange(Xmin,Xmax,precision)
+ Pnew = finterp(Xnew)
+
+ #calculate power by applying transfer function
+ if x_in >= Xmax-precision:
+ index = len(Pnew)-1
+ elif x_in <= Xmin + precision:
+ index = 0
+ else:
+ index = int(round((x_in-Xmin)/precision))
+ Power = Pnew[index]
+
+ PowerNorm = Power/Pmax #normalize
+
+ return PowerNorm
+
+def eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss = 5):
+
+ '''
+
+ Reconstitute wind production from wind speed histories for a single site.
+
+ syntax:
+ ACPowerNorm = eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss=5)
+
+ inputs:
+ WS: numpy array of wind speed measurements to be converted to production values
+ z_WS: height, in meters above ground level, of the wind speed measurements
+ pathWT: location of selected wind turbine technology's power curve file in computer file system
+ HH: wind turbine hub height
+ alpha (optional, default = 1/7): exponential factor describing the vertical wind profile; used to extrapolate
+ wind data to hub height. Can be scalar or vector with same length as wind data.
+ PercentLoss (optional, default = 5): percent loss due to multiple effects: the wake effect of adjacent wind turbines,
+ cable resistance between wind turbine/farm and injection point, grid and turbine unavailability, extreme weather conditions, etc.
+
+ outputs:
+ ACPowerNorm: numpy array of normalized expected wind production for the given wind farm.
+
+ '''
+
+
+ #open and treat wind turbine data in .pow file
+ f = open(pathWT)
+ lines = f.readlines()
+ WTdata = {}
+ WTdata["model"] = lines[0][1:-2]
+ WTdata['diameter'] = float(lines[1][1:-2])
+ WTdata['CutInWindSpeed'] = float(lines[4][1:-2])
+ WTdata['CutOutWindSpeed'] = float(lines[3][1:-2])
+ WTdata['PowerCurve'] = {}
+ WTdata['PowerCurve']['WindSpeed'] = np.arange(0, 31)
+ WTdata['PowerCurve']['Power'] = [float(0)] #in kW
+ for i in range(5,35):
+ WTdata['PowerCurve']['Power'].append(float(lines[i][1:-2]))
+
+ WTdata['Pmax']=max(WTdata['PowerCurve']['Power'])
+
+ #insert WT hub height
+ WTdata['z'] = HH
+
+ #correct wind speed values for appropriate height
+ WS_hh = WS*(WTdata['z']/z_WS)**alpha #wind speed at hub height
+
+ #calculate precision of cut in and cut out windspeeds
+ d1 = Decimal(str(WTdata['CutInWindSpeed']))
+ d2 = Decimal(str(WTdata['CutOutWindSpeed']))
+ expo = np.minimum(d1.as_tuple().exponent, d2.as_tuple().exponent)
+ precision = 10**(expo-1)
+
+ #insert points for cut-in and cut-out wind speeds
+ add_ci = 0
+ add_co= 0
+ if np.mod(WTdata['CutInWindSpeed'],1)==0:
+ add_ci = precision
+ if np.mod(WTdata['CutOutWindSpeed'],1)==0:
+ add_co = precision
+ i_cutin = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutInWindSpeed']+add_ci))[0][0]
+ i_cutout = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutOutWindSpeed']+add_co))[0][0] + 1 #+1 to account for addition of cut in point
+ WTdata['PowerCurve']['WindSpeed'] = list(WTdata['PowerCurve']['WindSpeed'])
+ WTdata['PowerCurve']['WindSpeed'].insert(i_cutin, WTdata['CutInWindSpeed']+add_ci)
+ WTdata['PowerCurve']['WindSpeed'].insert(i_cutout, WTdata['CutOutWindSpeed']+add_co)
+ WTdata['PowerCurve']['Power'].insert(i_cutin, 0)
+ WTdata['PowerCurve']['Power'].insert(i_cutout, 0)
+
+ #change to array type for consistency
+ WTdata['PowerCurve']['WindSpeed'] = np.array(WTdata['PowerCurve']['WindSpeed'])
+ WTdata['PowerCurve']['Power'] = np.array(WTdata['PowerCurve']['Power'])
+
+ #interpolate between values so that precise wind speed data doesn't output heavily discretized power levels
+ from scipy import interpolate
+ finterp = interpolate.interp1d(WTdata['PowerCurve']['WindSpeed'],WTdata['PowerCurve']['Power'], kind='linear')
+ Vnew = np.arange(0,30,precision)
+ Pnew = finterp(Vnew)
+
+ #calculate power produced by turbine in function of wind speed
+ Pac_turbine = []
+ for i, ws in enumerate(WS_hh):
+ if ws >= 30-precision:
+ index = len(Pnew)-1
+ else:
+ index = int(round(ws/precision)) #index of correct wind speed
+ Pac_turbine.append(Pnew[index]) #Power corresponds to value with same index as wind speed vector
+ Pac_turbine = np.array(Pac_turbine)
+
+ #account for Losses...currently a single loss factor but could imagine implementing a per-point method
+ #WakeEffects = 4 #3-8% for a typical farm, 0% for an individual windmill
+ #CableResistanceLosses = 1 #1-3% between windmills and electric counter, depending on voltage levels and cable length
+ #GridUnavalability = 1
+ #WTUnavailability = 3
+ #AcousticCurtailment = 1-4
+ #Freezing = 0.5
+ #LF = (1-WakeEffects/100)*(1-CableResistanceLosses/100) #loss factor
+ ACPower = Pac_turbine*(1-PercentLoss/100.) #total AC power produced by wind turbine (the 100. forces float division: with an integer PercentLoss, Python 2 would floor 5/100 to 0 and silently skip the losses)
+ ACPowerNorm = ACPower/WTdata['Pmax']
+ return ACPowerNorm
+
+#Function that reads the network data of interest and stores it in matrices
+def read_pfd(app,doc,recal=0):
+ ########################################################
+ # purpose of this function: retrieve the network parameters
+ ########################################################
+ # if recal==1, recompute the load flow
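+ # The function collects, per element class (buses, lines, 2- and 3-winding
+ # transformers, synchronous/asynchronous/static generators, PV systems,
+ # motors, ...), one row of electrical quantities per element, resolving
+ # each bus name to its index in the 'buses' list built below.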
+ prj = app.GetActiveProject()
+ studycase=app.GetActiveStudyCase()
+ grids=studycase.GetChildren(1,'*.ElmNet',1)[0].contents
+ if recal == 1:#compute load flow
+ ldf = app.GetFromStudyCase('ComLdf')
+ ldf.Execute() #run
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents('*.ElmTerm', 1))
+ bus = []
+ for noeud in tous:
+ bus.append(noeud)
+ noeuds = sorted(bus, key=lambda x: x.cStatName)
+ buses = []
+
+ for ii in range(len(noeuds)):
+ if noeuds[ii].HasResults():
+ mu = noeuds[ii].GetAttribute('m:u')
+ mphiu = noeuds[ii].GetAttribute('m:phiu')
+ else :
+ mu = 0
+ mphiu = 0
+ busname = noeuds[ii].cStatName.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ aa = [ii, noeuds[ii].uknom, mu, busname, noeuds[ii].vmin,
+ noeuds[ii].vmax, noeuds[ii].GetBusType(), mphiu,noeuds[ii]]
+ # [number, nominal kV, magnitude p.u., busname, Vmin, Vmax, type, angle (deg), obj]
+ buses.append(aa)
+ ##===================== Lines =====================
+ # lignes = app.GetCalcRelevantObjects('*.ElmLne', 0)
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmLne', 1))
+ lines=[]
+ for line in tous:
+ frombus_name=line.bus1.cBusBar.cStatName
+ frombus_name = frombus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if frombus_name in buses[ii]:
+ frombus_number=ii
+ break
+ tobus_name=line.bus2.cBusBar.cStatName
+ tobus_name = tobus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if tobus_name in buses[ii]:
+ tobus_number=ii
+ break
+
+ outs = line.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = line.outserv
+ else:
+ outserv = line.outserv
+ if outserv==1:
+ currentA = 0
+ pourcent = 0
+ flowP = 0
+ flowQ = 0
+ else:
+ currentA=line.GetAttribute('m:I:bus1') #line current in A
+ pourcent=line.GetAttribute('c:loading') #line loading in %
+ flowP=line.GetAttribute('m:P:bus1')
+ flowQ = line.GetAttribute('m:Q:bus1')
+
+ idline=line.loc_name#line.nlnum
+ aa=[frombus_number,tobus_number,currentA,pourcent,pourcent,pourcent,flowP,flowQ,frombus_name,tobus_name,idline,line]
+ lines.append(aa)
+
+ # ==============2-winding transformers data (from, to, amps, rate%a, ploss, qloss)==============
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmTr2', 1))
+ transf=[]
+ for trans in tous:
+ frombus_name=trans.bushv.cBusBar.cStatName
+ frombus_name = frombus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if frombus_name in buses[ii]:
+ frombus_number=ii
+ break
+ tobus_name=trans.buslv.cBusBar.cStatName
+ tobus_name = tobus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if tobus_name in buses[ii]:
+ tobus_number=ii
+ break
+
+ outs = trans.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = trans.outserv
+ else:
+ outserv = trans.outserv
+
+ if trans.outserv == 1 or outserv==1:
+ currentA = 0
+ pourcent = 0
+ flowP = 0
+ flowQ = 0
+ else:
+ currentA=trans.GetAttribute('m:I:bushv') #current in A at the HV bus
+ pourcent=trans.GetAttribute('c:loading') # loading in %
+ flowP=trans.GetAttribute('m:P:bushv')
+ flowQ = trans.GetAttribute('m:Q:bushv')
+ # idtr=trans.ntnum
+ idtr = trans.loc_name
+ aa=[frombus_number,tobus_number,currentA,pourcent,pourcent,pourcent,flowP,flowQ,frombus_name,tobus_name,idtr,trans]
+ transf.append(aa)
+ # ==============3-winding transformers data (from, to, amps, rate%a, ploss, qloss)==============
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmTr3', 1))
+ transf3 = []
+ for trans in tous:
+ wind1name = trans.bushv.cBusBar.cStatName
+ wind1name = wind1name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if wind1name in buses[ii]:
+ wind1number = ii
+ break
+ wind2name = trans.busmv.cBusBar.cStatName
+ wind2name = wind2name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if wind2name in buses[ii]:
+ wind2number = ii
+ break
+ wind3name = trans.buslv.cBusBar.cStatName
+ wind3name = wind3name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if wind3name in buses[ii]:
+ wind3number = ii
+ break
+ outs = trans.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = trans.outserv
+ else:
+ outserv = trans.outserv
+ if trans.outserv == 1 or outserv==1:
+ currentHV = 0
+ currentMV = 0
+ currentLV = 0
+ pourcenthv = 0
+ pourcentmv = 0
+ pourcentlv = 0
+ flowPh = 0
+ flowPm = 0
+ flowPl = 0
+ flowQh = 0
+ flowQm = 0
+ flowQl = 0
+ else:
+ currentHV = trans.GetAttribute('m:I:bushv') # current in A at the HV bus
+ currentMV = trans.GetAttribute('m:I:busmv') # current in A at the MV bus
+ currentLV = trans.GetAttribute('m:I:buslv') # current in A at the LV bus
+ pourcenthv = trans.GetAttribute('c:loading_h') # HV loading in %
+ pourcentmv = trans.GetAttribute('c:loading_m') # MV loading in %
+ pourcentlv = trans.GetAttribute('c:loading_l') # LV loading in %
+ flowPh = trans.GetAttribute('m:P:bushv')
+ flowPm = trans.GetAttribute('m:P:busmv')
+ flowPl = trans.GetAttribute('m:P:buslv')
+ flowQh = trans.GetAttribute('m:Q:bushv')
+ flowQm = trans.GetAttribute('m:Q:busmv')
+ flowQl = trans.GetAttribute('m:Q:buslv')
+ # idtr3 = trans.nt3nm
+ idtr3 = trans.loc_name
+ aa = [wind1number, wind2number,wind3number,1, currentHV, pourcenthv, pourcenthv, pourcenthv, flowPh, flowQh, wind1name,wind2name,wind3name,idtr3,trans]
+ transf3.append(aa)
+ aa = [wind1number, wind2number, wind3number, 2, currentMV, pourcentmv, pourcentmv, pourcentmv, flowPm, flowQm,
+ wind1name, wind2name, wind3name, idtr3, trans]
+ transf3.append(aa)
+ aa = [wind1number, wind2number, wind3number, 3, currentLV, pourcentlv, pourcentlv, pourcentlv, flowPl, flowQl,
+ wind1name, wind2name, wind3name, idtr3, trans]
+ transf3.append(aa)
+
+ #Machines data (bus, inservice, id, pgen, qgen, mvabase, pmax, qmax, name)==============Generator
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
+ plants = []
+ for plant in tous:
+ if plant.i_mot==0:
+ busname=plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name#plant.ngnum
+ outs=plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+ if plant.outserv == 1 or outserv ==1 :
+ pgen = 0
+ qgen = 0
+ else:
+ pgen = plant.GetAttribute('m:P:bus1')
+ qgen = plant.GetAttribute('m:Q:bus1')
+ sn = plant.GetAttribute('t:sgn')
+ pmax = plant.Pmax_uc
+ # pmax = plant.P_max
+ pmin = plant.Pmin_uc
+ qmax = plant.cQ_max
+ qmin = plant.cQ_min
+ typ = 'ElmSym'
+ aa=[busnumber,plant.outserv,idplant,pgen,qgen,sn,pmax,pmin,busname,pmin,qmin,plant, typ]
+ plants.append(aa)
+ ## __________________ Asynchronous machines ___________________________
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
+ for plant in tous:
+ if plant.i_mot==0:
+ busname=plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name#plant.ngnum
+ outs = plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+ if plant.outserv == 1 or outserv==1:
+ pgen=0
+ qgen = 0
+ else:
+ pgen = plant.GetAttribute('m:P:bus1')
+ qgen = plant.GetAttribute('m:Q:bus1')
+ sn = plant.GetAttribute('t:sgn')
+ pmax = plant.Pmax_uc
+ # pmax = plant.P_max
+ pmin = plant.Pmin_uc
+ qmax = plant.cQ_max
+ qmin = plant.cQ_min
+ typ = 'ElmAsm'
+ aa=[busnumber, plant.outserv,idplant,pgen,qgen,sn,pmax,pmin,busname,pmin,qmin,plant,typ]
+ plants.append(aa)
+ ## _______________GenStat ________________
+ tous = []
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmGenstat', 1))
+ for plant in tous:
+ busname = plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name # plant.ngnum
+ outs = plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+ if plant.outserv == 1 or outserv == 1:
+ pgen = 0
+ qgen = 0
+ else:
+ pgen = plant.GetAttribute('m:P:bus1')
+ qgen = plant.GetAttribute('m:Q:bus1')
+ sn = plant.GetAttribute('e:sgn')
+ pmax = plant.Pmax_uc
+ # pmax = plant.P_max
+ pmin = plant.Pmin_uc
+ qmax = plant.cQ_max
+ qmin = plant.cQ_min
+ typ = 'ElmGenstat'
+ aa = [busnumber, plant.outserv, idplant, pgen, qgen, sn, pmax, pmin, busname, pmin, qmin,plant, typ]
+ plants.append(aa)
+ ## ____________________ ElmPvsys ______________________________
+ tous = []
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmPvsys', 1))
+ for plant in tous:
+ busname = plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name # plant.ngnum
+ outs = plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+ if plant.outserv == 1 or outserv == 1:
+ pgen = 0
+ qgen = 0
+ else:
+ pgen = plant.GetAttribute('m:P:bus1')
+ qgen = plant.GetAttribute('m:Q:bus1')
+ sn = plant.GetAttribute('e:sgn')
+ pmax = plant.Pmax_uc
+ # pmax = plant.P_max
+ pmin = plant.Pmin_uc
+ qmax = plant.cQ_max
+ qmin = plant.cQ_min
+ typ = 'ElmPvsys'
+ aa = [busnumber, plant.outserv, idplant, pgen, qgen, sn, pmax, pmin, busname, pmin, qmin,plant, typ]
+ plants.append(aa)
+ # Motors data (bus, active, reactive, status, name, id)===================== Motor
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
+ motors = []
+ for motor in tous:
+ if motor.i_mot == 1:
+ busname = motor.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = motor.loc_name#motor.ngnum
+ outs = motor.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = motor.outserv
+ else:
+ outserv = motor.outserv
+ if motor.outserv == 1 or outserv == 1:
+ pgen = 0
+ qgen = 0
+ else:
+ pgen = motor.GetAttribute('m:P:bus1')
+ qgen = motor.GetAttribute('m:Q:bus1')
+ aa = [busnumber, pgen, qgen, motor.outserv, busname,idplant,motor]
+ motors.append(aa)
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
+ for motor in tous:
+ if motor.i_mot == 1:
+ busname = motor.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = motor.loc_name#motor.ngnum
+ outs = motor.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = motor.outserv
+ else:
+ outserv = motor.outserv
+ # outserv = motor.outserv
+ if outserv == 1:
+ pgen = 0
+ qgen = 0
+ else:
+ pgen = motor.GetAttribute('m:P:bus1')
+ qgen = motor.GetAttribute('m:Q:bus1')
+ aa = [busnumber, pgen, qgen, motor.outserv, busname,idplant,motor]
+ motors.append(aa)
+
+ # Loads data (bus, active, reactive, status, name, id)===================== Load
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmLod', 1))
+ tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
+ loads = []
+ for bus in buses:
+ idload = 0
+ for load in tous:
+ busname = load.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ if busname == bus[3]:
+ idload += 1 # create an id for the load
+ busnumber = bus[0]
+ # outserv = load.outserv
+ outs = load.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = load.outserv
+ else:
+ outserv = load.outserv
+ if load.outserv == 1 or outserv == 1:
+ # if outserv == 1:
+ pload = 0.0
+ qload = 0.0
+ else:
+ pload = load.GetAttribute('m:P:bus1')
+ qload = load.GetAttribute('m:Q:bus1') # qlini_a
+ aa = [busnumber, pload, qload, load.outserv, busname, idload,load]
+ loads.append(aa)
+
+ #Fixed shunt data (number, MVAR, name, ...)========================== Fixed Shunt
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmShnt', 1))
+ tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
+ shunt = []
+ for bus in buses:
+ idshunt = 0
+ for shunt1 in tous:
+ if shunt1.ncapx==1:# number of steps = 1: treat as a fixed shunt, for equivalence with the old PSSE-based program
+ busname = shunt1.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ if busname == bus[3]:
+ idshunt += 1 # create an id for the shunt
+ busnumber = bus[0]
+ qnom=shunt1.Qmax
+ outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = shunt1.outserv
+ else:
+ outserv = shunt1.outserv
+ if outserv == 1:
+ qshunt = 0
+ else:
+ qshunt = shunt1.GetAttribute('m:Q:bus1') # qlini_a
+ aa = [busnumber, outserv, qshunt, busname,qnom, idshunt,bus,shunt1]
+ shunt.append(aa)
+ # Switched shunt data (number, status, MVAR, name, Qnom, id)================Switched Shunt
+ swshunt = []
+ for bus in buses:
+ idshunt = 0
+ for shunt1 in tous:
+ if shunt1.ncapx != 1: # number of steps != 1: treat as a switched shunt, for equivalence with the old PSSE-based program
+ busname = shunt1.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ if busname == bus[3]:
+ idshunt += 1 # create an id for the shunt
+ busnumber = bus[0]
+ qnom = shunt1.Qmax
+ outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = shunt1.outserv
+ else:
+ outserv = shunt1.outserv
+ if outserv == 1:
+ qshunt = 0
+ else:
+ qshunt = shunt1.GetAttribute('m:Q:bus1') # qlini_a
+ aa = [busnumber, outserv, qshunt, busname, qnom, idshunt,shunt1]
+ swshunt.append(aa)
+
+ return buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt
+
+def read_pfd_simple(app,doc):
+ ########################################################
+ # purpose of this function: retrieve the network parameters
+ ########################################################
+ # if recal == 1, recompute the load flow
+ prj = app.GetActiveProject()
+ studycase=app.GetActiveStudyCase()
+ grids=studycase.GetChildren(1,'*.ElmNet',1)[0].contents
+ # if recal == 1:#calculer load-flow
+ # ldf = app.GetFromStudyCase('ComLdf')
+ # ldf.Execute() #run
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmTerm', 1))
+ bus = []
+ for noeud in tous:
+ bus.append(noeud)
+ noeuds = sorted(bus, key=lambda x: x.cStatName)
+ buses = []
+ for ii in range(len(noeuds)):
+ busname = noeuds[ii].cStatName.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ aa = [ii, noeuds[ii].uknom, 1, busname, noeuds[ii].vmin,
+ noeuds[ii].vmax, noeuds[ii].GetBusType(), 0,noeuds[ii]]
+ # [number, nominal kV, magnitude p.u., busname, Vmin, Vmax, type, angle (deg), obj]
+ buses.append(aa)
+
+ #Machines data (bus, inservice, id, pgen, qgen, mvabase, pmax, qmax, name)==============Generator
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
+ plants = []
+ for plant in tous:
+ if plant.i_mot==0:
+ busname=plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name#plant.ngnum
+ outs=plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+
+ aa=[busnumber,outserv,idplant,0,0,0,0,0,busname,0,0,plant]
+ plants.append(aa)
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
+ for plant in tous:
+ if plant.i_mot==0:
+ busname=plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name#plant.ngnum
+ outs = plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+ aa=[busnumber,outserv,idplant,0,0,0,0,0,busname,0,0,plant]
+ plants.append(aa)
+ tous = []
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmGenstat', 1))
+ for plant in tous:
+ busname = plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name # plant.ngnum
+ outs = plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+ pgini = plant.pgini
+ pgini_a = plant.pgini_a
+
+ aa = [busnumber, outserv, idplant, 0, 0, 0, 0, 0, busname, 0, 0,plant, pgini, pgini_a]
+ plants.append(aa)
+ tous = []
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmPvsys', 1))
+ for plant in tous:
+ busname = plant.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ for ii in range(len(buses)):
+ if busname in buses[ii]:
+ busnumber = ii
+ break
+ idplant = plant.loc_name # plant.ngnum
+ outs = plant.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv==0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = plant.outserv
+ else:
+ outserv = plant.outserv
+
+ aa = [busnumber, outserv, idplant, 0, 0, 0, 0, 0, busname, 0, 0,plant]
+ plants.append(aa)
+
+ # Loads data (bus, active, reactive, status, name, id)===================== Load
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmLod', 1))
+ tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
+ loads = []
+ for bus in buses:
+ idload = 0
+ for load in tous:
+ busname = load.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ if busname == bus[3]:
+ idload += 1 # create an id for the load
+ busnumber = bus[0]
+ # outserv = load.outserv
+ outs = load.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = load.outserv
+ else:
+ outserv = load.outserv
+ if outserv == 1:
+ pload = 0
+ qload = 0
+ else:
+ pload = load.plini_a
+ qload = load.qlini_a
+ aa = [busnumber, pload, qload, outserv, busname, idload,load]
+ loads.append(aa)
+
+ #Fixed shunt data (number, MVAR, name, ...)========================== Fixed Shunt
+ tous=[]
+ for grid in grids:
+ tous.extend(grid.obj_id.GetContents( '*.ElmShnt', 1))
+ tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
+ shunt = []
+ for bus in buses:
+ idshunt = 0
+ for shunt1 in tous:
+ if shunt1.ncapx==1:# number of steps = 1: treat as a fixed shunt, for equivalence with the old PSSE-based program
+ busname = shunt1.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ if busname == bus[3]:
+ idshunt += 1 # create an id for the shunt
+ busnumber = bus[0]
+ qnom=shunt1.Qmax
+ outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = shunt1.outserv
+ else:
+ outserv = shunt1.outserv
+ if outserv == 1:
+ qshunt = 0
+ else:
+ qshunt = shunt1.Qact
+ aa = [busnumber, outserv, qshunt, busname,qnom, idshunt,bus,shunt1]
+ shunt.append(aa)
+ # Switched shunt data (number, status, MVAR, name, Qnom, id)================Switched Shunt
+ swshunt = []
+ for bus in buses:
+ idshunt = 0
+ for shunt1 in tous:
+ if shunt1.ncapx != 1: # number of steps != 1: treat as a switched shunt, for equivalence with the old PSSE-based program
+ busname = shunt1.bus1.cBusBar.cStatName
+ busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
+ if busname == bus[3]:
+ idshunt += 1 # create an id for the shunt
+ busnumber = bus[0]
+ qnom = shunt1.Qmax
+ outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
+ if outs:
+ if outs[0].outserv == 0:
+ outserv = outs[0].typ_id.curval
+ else:
+ outserv = shunt1.outserv
+ else:
+ outserv = shunt1.outserv
+ if outserv == 1:
+ qshunt = 0
+ else:
+ qshunt = shunt1.Qact
+ aa = [busnumber, outserv, qshunt, busname, qnom, idshunt,shunt1]
+ swshunt.append(aa)
+
+ return plants, loads, shunt, swshunt
+
+
+
+def MyLogger(x,y,z,logCSVfilename,ite):
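+ """Append one simulation record to logCSVfilename as a semicolon-separated
+ row: the iteration number, then the input values x, the outputs z and
+ finally the outputs y."""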
+ f=open(logCSVfilename, 'a')
+ f.write(str(ite)+';')
+ f.write(";")
+ nx = len(x)
+ for i in range(0,nx):
+ f.write(str(x[i]))#f.write("%f;" % (x[i]))
+ f.write(";")
+ f.write(";")
+ nz = len(z)
+ for i in range(0,nz):
+ try:
+ f.write("%f;" % (z[i]))
+ except:
+ f.write(str(z[i])+";")
+ f.write(";")
+ ny = len(y)
+ for j in range(0,ny):
+ f.write("%f;" % (y[j]))
+ f.write("\n")
+ f.close()
+
+
+# Function to write one CSV output file per type of output quantity
+def MyMultiLogger (x, y, sizeY, z, ite, folder, day, fich, hour):
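+ """Split the output vector y into slices of sizes sizeY and append each
+ slice, together with the inputs x and z, to its own per-day CSV file.
+ Note: the loop variable 'fich' shadows the parameter of the same name."""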
+ global ny
+ y0=0
+ for fich in range (np.size(sizeY,0)):
+ multilogfilename=folder+"/N"+day+"/Y"+str(fich)+"simulationDClog_"+hour+".csv"
+ f=open(multilogfilename, 'a')
+ f.write("%f;" % (ite))
+ f.write(";")
+ nx = len(x)
+ for i in range(0,nx):
+ f.write("%f;" % (x[i]))
+ f.write(";")
+ nz = len(z)
+ for i in range(0,nz):
+ f.write("%f;" % (z[i]))
+ f.write(";")
+ ny = sizeY[fich]
+ for j in range(0,ny):
+ f.write("%f;" % (y[j+y0]))
+ f.write("\n")
+ f.close()
+ y0 += ny
+ print ("Files "+str(ite)+" saved\n\n")
+
+# Graphical analyses
+def graphical_out (inputSample, outputSampleAll, inputDim, outputDim, montecarlosize) :
+ print ("\n\n\n Writing graphical analysis files...")
+ # A Pairwise scatter plot of the inputs
+ myGraph = Graph()
+ myPairs = Pairs(inputSample, 'Inputs relations', inputSample.getDescription(), "red", "bullet")
+ myGraph.add(Drawable(myPairs))
+ myGraph.draw("Input Samples",640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print ('Input pairwise scatterplot done...')
+
+ # A Pairwise scatter plot of the outputs
+ myGraph = Graph()
+ myPairs = Pairs(outputSampleAll, 'Output relations', outputSampleAll.getDescription(), "red", "bullet")
+ myGraph.add(Drawable(myPairs))
+ myGraph.draw("Output Samples",640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print ('Output pairwise scatterplot done...')
+
+ # A Pairwise scatter plot of the inputs/outputs
+ # Draw all scatter plots yj vs xi
+ for j in range(outputDim):
+ outputSamplej=outputSampleAll.getMarginal(j)
+ Ylabelstr=outputSamplej.getDescription()[0]
+ for i in range(inputDim):
+ inputSamplei=inputSample.getMarginal(i)
+ Xlabelstr=inputSamplei.getDescription()[0]
+ X=NumericalSample(montecarlosize,2)
+ for k in range(montecarlosize):
+ X[k,0]=inputSamplei[k][0]
+ X[k,1]=outputSamplej[k][0]
+ myGraph = Graph()
+ myCloud=Cloud(X);
+ mytitle=Ylabelstr+"vs"+Xlabelstr
+ myGraph.add(Drawable(myCloud))
+ myGraph.setAxes(1)
+ myGraph.setXTitle(Xlabelstr)
+ myGraph.setYTitle(Ylabelstr)
+ myGraph.draw(mytitle,640,480,GraphImplementation.PDF)
+ #ViewImage(myGraph.getBitmap())
+ print( 'Input/Output pairwise scatterplot done...')
+
+ # A histogram of the inputs
+ for i in range(inputDim):
+ inputSamplei=inputSample.getMarginal(i)
+ myGraph = VisualTest.DrawHistogram(inputSamplei)
+ labelarray=inputSamplei.getDescription()
+ labelstr=labelarray[0]
+ myGraph.setTitle(labelstr)
+ myGraph.setName(labelstr)
+ myGraph.setXTitle(labelstr)
+ myGraph.setYTitle("Frequency")
+ myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print ('Input histogram done...')
+
+ # A histogram of the outputs
+ for j in range(outputDim):
+ outputSamplej=outputSampleAll.getMarginal(j)
+ myGraph = VisualTest.DrawHistogram(outputSamplej)
+ labelarray=outputSamplej.getDescription()
+ labelstr=labelarray[0]
+ myGraph.setTitle(labelstr)
+ myGraph.setName(labelstr)
+ myGraph.setXTitle(labelstr)
+ myGraph.setYTitle("Frequency")
+ myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
+ #View(myGraph.getBitmap())
+ print ('Output histogram done')
+ print ('Graphical output terminated')
+
+
+def config_contingency(LinesList,GroupsList,TransformersList,LoadsList,MotorsList) :
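+ """Flatten the per-component contingency lists of (name, probability)
+ pairs into parallel name lists per component type, plus a list of
+ integer codes 'val' and the matching probabilities 'prob' used to build
+ the discrete law for N-1 drawing."""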
+
+ lines_con=[]
+ groups_con=[]
+ loads_con = []
+ transfos_con = []
+ motors_con = []
+ sizeLines = len(LinesList)
+ sizeGroups = len(GroupsList)
+ sizeTransfos = len(TransformersList)
+ sizeLoads = len(LoadsList)
+ sizeMotors = len(MotorsList)
+ val=[]
+ prob=[]
+
+ for i in range(sizeLines+sizeGroups+sizeTransfos + sizeLoads + sizeMotors) :
+ val.append(int(i))
+ for i in range (sizeLines) :
+ lines_con.append(LinesList[i][0])
+ prob.append(LinesList[i][1])
+ for i in range (sizeGroups) :
+ prob.append(GroupsList[i][1])
+ groups_con.append(GroupsList[i][0])
+ for i in range (sizeTransfos) :
+ prob.append(TransformersList[i][1])
+ transfos_con.append(TransformersList[i][0])
+ for i in range (sizeLoads) :
+ prob.append(LoadsList[i][1])
+ loads_con.append(LoadsList[i][0])
+ for i in range (sizeMotors) :
+ prob.append(MotorsList[i][1])
+ motors_con.append(MotorsList[i][0])
+
+ return lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob
+
+def LoadARMA(time_serie_file, time_serie_SS, time_serie_TH) :
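+ """Fit an ARMA(p=12, q=0) model (d=1) with OpenTURNS to the univariate
+ time series read from time_serie_file, plot one realization, then
+ generate a possible future of time_serie_TH steps, rescaled into (0, 1]."""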
+ f=open(time_serie_file,"r")
+ lines=f.readlines()
+ N=len(lines)
+ Xt=[]
+ for i in range(N) :
+ Xt.append([float(lines[i])])
+
+ myTG=RegularGrid(0,float(time_serie_SS),N)
+ TS=TimeSeries(myTG,NumericalSample(Xt))
+ myWN=WhiteNoise(Distribution(Normal(0,1)),myTG)
+ myState=ARMAState(TS.getSample(),NumericalSample())
+ p=12
+ q=0
+ d=1
+ myFactory = ARMALikelihoodFactory ( p , q , d )
+ myARMA = myFactory.build(TS)
+
+ myARMA.setState(myState)
+
+ AR = myARMA.getARCoefficients()
+ MA = myARMA.getMACoefficients()
+
+ ts = myARMA.getRealization()
+ ts.setName('A realization')
+ myTSGraph=ts.drawMarginal(0)
+ myTSGraph.draw('Realization'+str(p)+","+str(q),640,480,GraphImplementation.PDF)
+ myARMAState=myARMA.getState()
+
+ #Make a prediction of the future on next Nit instants
+ Nit = int(time_serie_TH)
+ myARMA2=ARMA(AR,MA,myWN,myARMAState)
+ possibleFuture=myARMA2.getFuture(Nit)
+ possibleFuture.setName('Possible future')
+
+ Xt2=[]
+ for i in range (len(possibleFuture)):
+ Xt2.append(possibleFuture.getValueAtIndex(i)[0])
+ Max=float(max(Xt2))
+ Min=float(min(Xt2))
+ h=float(Max-Min)
+ for i in range (len(possibleFuture)):
+ value= (Xt2[i]-Min+h/3)/(Max-Min+h/3)
+ possibleFuture.setValueAtIndex(i,NumericalPoint(1,value))
+
+ myFG=possibleFuture.drawMarginal(0)
+ myFG.draw('Future'+str(Nit),640,480,GraphImplementation.PDF)
+
+ return possibleFuture
+
+def LoadTS(time_serie_file) :
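+ """Read each time-series file in the list (entries equal to -1 are
+ skipped), converting decimal commas to points, and return the series as
+ lists of single-value samples."""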
+ TS=[]
+ for i in range(len(time_serie_file)) :
+ if time_serie_file[i] == -1 :
+ pass
+ else :
+ f=open(time_serie_file[i],"r")
+ lines=f.readlines()
+ N=len(lines)
+ Xt=[]
+ for j in range(N) :
+ try :
+ float(lines[j])
+ except ValueError :
+ lines[j] = commaToPoint(lines[j])
+ else :
+ pass
+ Xt.append([float(lines[j])])
+ TS.append(Xt)
+ return TS
+
+def KSDist(lines) :
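+ """Build a kernel-smoothing distribution from the lines of a .csv file,
+ using the Silverman bandwidth (OpenTURNS 1.8 API)."""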
+ print( "Creating Kernel Smoothing distribution ")
+ N=len(lines)
+ Xt=[]
+ for i in range(N) :
+ if lines[i] == "\n" :
+ print( "End of file")
+ break
+ else :
+ try :
+ float(lines[i])
+ except ValueError :
+ lines[i] = commaToPoint(lines[i])
+ else :
+ pass
+ Xt.append([float(lines[i])])
+ NS=NumericalSample(Xt)
+ kernel=KernelSmoothing(Uniform())
+ myBandwith = kernel.computeSilvermanBandwidth(NS)
+ ##for openturns 1.6
+ #KS=kernel.build(NS,myBandwith,1)
+
+ #for openturns 1.8
+ KS=kernel.build(NS,myBandwith)
+ kernel.setBoundaryCorrection(True)
+ return KS
+
+
+def threshold (inputRandomVector, outputVariableOfInterest,pssefun,inputDistribution) :
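+ """Run a quadratic cumul (Taylor expansion) analysis of the output
+ variable of interest and print the first- and second-order means, the
+ standard deviation, the number of evaluation calls used for each, and
+ the importance factors of the inputs."""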
+ # We create a quadraticCumul algorithm
+ myQuadraticCumul = QuadraticCumul(outputVariableOfInterest)
+
+ # We compute the several elements provided by the quadratic cumul algorithm
+ # and evaluate the number of calculus needed
+ nbBefr = pssefun.getEvaluationCallsNumber()
+
+ # Mean first order
+ meanFirstOrder = myQuadraticCumul.getMeanFirstOrder()[0]
+ nbAfter1 = pssefun.getEvaluationCallsNumber()
+
+ # Mean second order
+ meanSecondOrder = myQuadraticCumul.getMeanSecondOrder()[0]
+ nbAfter2 = pssefun.getEvaluationCallsNumber()
+
+ # Standard deviation
+ stdDeviation = sqrt(myQuadraticCumul.getCovariance()[0,0])
+ nbAfter3 = pssefun.getEvaluationCallsNumber()
+
+ print( "First order mean=", myQuadraticCumul.getMeanFirstOrder()[0])
+ print( "Evaluation calls number = ", nbAfter1 - nbBefr)
+ print( "Second order mean=", myQuadraticCumul.getMeanSecondOrder()[0])
+ print( "Evaluation calls number = ", nbAfter2 - nbAfter1)
+ print ("Standard deviation=", sqrt(myQuadraticCumul.getCovariance()[0,0]))
+ print( "Evaluation calls number = ", nbAfter3 - nbAfter2)
+
+ print ( "Importance factors=")
+ for i in range(inputRandomVector.getDimension()) :
+ print(inputDistribution.getDescription()[i], " = ", myQuadraticCumul.getImportanceFactors()[i])
+ print ("")
+
+def getUserDefined (values):
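+ """Build an OpenTURNS UserDefined (discrete) distribution from a list of
+ (value, probability) pairs, normalizing the probabilities first."""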
+ val = []
+ prob = []
+ for a in values:
+ val.append(a[0])
+ prob.append(a[1])
+ dim = len (val)
+
+ prob = list(map(float,prob))
+ prob = [p/sum(prob) for p in prob]
+
+## weights = NumericalPoint(prob)
+## Vals = []
+## for i in range(dim):
+## Vals.append([float(val[i]),float(val[i])+0.000001])
+## ranges = NumericalSample(Vals)
+## return UserDefined(ranges, weights)
+ coll = UserDefinedPairCollection()
+ for i in range (dim) :
+ UDpair=UserDefinedPair(NumericalPoint(1,float(val[i])),float(prob[i]))
+ coll.add(UDpair)
+ return UserDefined(coll)
+
+def getHistogram (values) :
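+ """Build a HistogramPairCollection from a list of (step, probability)
+ pairs, for use with the OpenTURNS Histogram law."""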
+ step = []
+ prob = []
+ for a in values:
+ step.append(a[0])
+ prob.append(a[1])
+ dim = len (step)
+ myHistogram = HistogramPairCollection(dim)
+ for i in range (dim) :
+ try:
+ myHistogram[i]=HistogramPair(float(step[i]),float(prob[i]))
+ except:
+ pass
+ return myHistogram
+
+def getUserLaw(LawDico):
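+ """Map an Eficas law dictionary to an OpenTURNS distribution. Returns the
+ law plus [time_serie, time_serie_file]; time-series laws get a
+ near-degenerate Uniform(0.999999, 1) placeholder law."""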
+ time_serie = 0
+ time_serie_file = ''
+ time_serie_SS = 0
+ time_serie_TH = 0
+ if LawDico['Law']=="Normal":
+ law = Normal(float(LawDico['Mu']),float(LawDico['Sigma']))#Openturns
+ elif LawDico['Law']=="Uniform":
+ law=Uniform(float(LawDico['A']),float(LawDico['B']))
+ elif LawDico['Law']=="Exponential":
+ law=Exponential(float(LawDico['Lambda']),float(LawDico['Gamma']))
+ elif LawDico['Law']=="Weibull":
+ if LawDico['Settings']=='AlphaBeta':
+ law=Weibull(float(LawDico['Alpha']),float(LawDico['Beta']),float(LawDico['Gamma']))
+ elif LawDico['Settings']=='MuSigma':
+ law=Weibull(float(LawDico['Mu']),float(LawDico['Sigma']),float(LawDico['Gamma']),Weibull.MUSIGMA)
+ elif LawDico['Law']=="TruncatedNormal":
+ law=TruncatedNormal(float(LawDico['MuN']),float(LawDico['SigmaN']),float(LawDico['A']),float(LawDico['B']))
+ elif LawDico['Law']=="UserDefined":
+ law=UserDefined(getUserDefined (LawDico['Values']))
+ elif LawDico['Law']=="Histogram":
+ law=Histogram(LawDico['First'], getHistogram (LawDico['Values']))
+ elif LawDico['Law']=="PDF_from_file":
+ law=KSDist(LawDico['FileContents'])
+ elif LawDico['Law']=="TimeSeries_from_file":
+ law = Uniform(0.999999,1)
+ time_serie=1
+ time_serie_file=LawDico['FileContents']
+ else :
+ law = Uniform(0.999999,1)
+ return law, [time_serie, time_serie_file] #[time_serie, time_serie_file, time_serie_SS, time_serie_TH]
+
+def contingency_automatic (dfxPath, acccPath, rate) :
+ psspy.accc_with_dsp_3( 0.5,[0,0,0,1,1,2,0,0,0,0,0],r"""ALL""",dfxPath,acccPath,"","","")
+ psspy.accc_single_run_report_4([1,int(rate),int(rate),1,1,0,1,0,0,0,0,0],[0,0,0,0,6000],[ 0.5, 5.0, 100.0,0.0,0.0,0.0, 99999.],acccPath)
+
+ rslt_summary=pssarrays.accc_summary(acccPath)
+ if int(rate) == 1 :
+ rate = rslt_summary.rating.a
+ elif int(rate) == 2 :
+ rate = rslt_summary.rating.b
+ elif int(rate) == 3 :
+ rate = rslt_summary.rating.c
+ else :
+ print( "NO RATE CHOSEN")
+
+ Labels=rslt_summary.colabel
+ contin_load=[]
+ for label in Labels :
+ t=[]
+ rslt=pssarrays.accc_solution(acccPath,contingency,label,0.5,5.0) # NB: 'contingency' is assumed to be defined at module level
+ ampFlow=rslt.ampflow
+ for i in range (len(ampFlow)) :
+ t.append(ampFlow[i]/rate[i])
+ contin_load.append(t)
+ return contin_load
+
+def commaToPoint (string) :
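+ """Replace decimal commas with points, e.g. commaToPoint('3,14') -> '3.14'."""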
+ stringReplaced = string.replace(',','.')
+ return stringReplaced
+
+def PFFunct(dico,x):
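+ """Run one package of power-flow cases. For each Monte Carlo draw, map
+ the raw random values to component states/levels (applying transfer
+ functions for wind generators), build the N-1 indicator matrix, write
+ data_trigger.csv, launch run_in_PFfunction.py in a subprocess and wait
+ for the .final result files (re-running missing cases with
+ correct_comtask.py), then unpickle and log the results."""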
+ # start1 = time.clock();
+ stop = time.clock(); start = stop;
+ Output = []
+ LS = []
+ FS = []
+ Pmachine = []
+ LStable = []
+ FStable = []
+ LS_beforeUC = []
+ FS_beforeUC = []
+ Pmachine_beforeUC = []
+ LStable_beforeUC = []
+ FStable_beforeUC = []
+ Output_beforeUC = []
+ flag_error=0
+
+ num_pac = dico['num_pac']
+ logCSVfilename = dico['logCSVfilename']
+
+ inputSample = []
+ for ite in range(len(x)):
+ inputSample.append(np.array(x[ite]))
+
+ TStest = dico['TStest']
+ Xt = dico['Xt']
+ folder = dico['folder']
+ folderN_1 = dico['folderN_1']
+ day = dico['day']
+ doc_base = dico['doc_base']
+ os.chdir(doc_base) # to work in right directory of the package
+ PFParams = dico['PFParams']
+
+ continLines = dico['continLines']
+ continGroups = dico['continGroups']
+ continTransfos = dico['continTransfos']
+ continLoads = dico['continLoads']
+ continMotors = dico['continMotors']
+ continVal = dico['continVal']
+ continProb = dico['continProb']
+ position = dico['position']
+ timeVect = dico['timeVect']
+ LawsList = dico['CorrMatrix']['laws']
+ N_1_LINES = dico['N_1_LINES']
+ N_1_TRANSFORMERS = dico['N_1_TRANSFORMERS']
+ N_1_MOTORS = dico['N_1_MOTORS']
+ N_1_LOADS = dico['N_1_LOADS']
+ N_1_GENERATORS = dico['N_1_GENERATORS']
+ # number of N-1 elements
+ nN1 = len(N_1_LINES) + len(N_1_TRANSFORMERS) + len(N_1_MOTORS) + len(N_1_LOADS) + len(N_1_GENERATORS)
+ x_copy = []
+
+ for ite in range(len(x)):
+ xite = []
+ for j in range(len(x[ite])):
+ xite.append(x[ite][j])
+ x_copy.append(xite)
+
+ for ite in range(len(x)):
+ if TStest == 1:
+ for i, law in enumerate(LawsList):
+ if Xt[ite][i] == -1:
+ if law != 'N_1_fromFile':
+ if 'Unavailability' in dico['Laws'][law]['Type']:
+ status = int(round(x[ite][i])) # ideally a number between 0 and 1 (inclusive) was drawn
+ status = min(status, 1) # force status to take the value 0 or 1
+ status = max(status, 0)
+ x_copy[ite][i] = status
+ if dico['Laws'][law]['ComponentType'] == 'Generator' and 'Level' in dico['Laws'][law]['Type']:
+ if dico['Laws'][law]['TransferFunction'] == True:
+ if dico['Laws'][law]['TF_Input'] == '.pow file':
+ z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
+ pathWT = dico['Laws'][law]['File_Name']
+ HH = dico['Laws'][law]['Hub_Height']
+ alpha = dico['Laws'][law]['AlphaWS']
+ PercentLoss = dico['Laws'][law]['Percent_Losses']
+ x_copy[ite][i] = eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
+ elif dico['Laws'][law]['TF_Input'] == 'tuples list':
+ x_copy[ite][i] = applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
+ else: # ensure values are between 0 and 1
+ Pval = x[ite][i]
+ Pval = min(Pval, 1)
+ Pval = max(Pval, 0)
+ x_copy[ite][i] = Pval
+ else: # law == 'N_1_fromFile'
+ x_copy[ite][i] = int(floor(x[ite][i]))
+
+ else:
+ x_copy[ite][i] = float(Xt[ite][i]) # for a time-series study, assign it the value from Xt
+
+ else:
+ for i, law in enumerate(LawsList):
+ if law != 'N_1_fromFile':
+ if 'Unavailability' in dico['Laws'][law]['Type']:
+ status = int(round(x[ite][i])) # ideally a number between 0 and 1 (inclusive) was drawn
+ status = min(status, 1) # force status to take the value 0 or 1
+ status = max(status, 0)
+ x_copy[ite][i] = status
+ if dico['Laws'][law]['ComponentType'] == 'Generator' and 'Level' in dico['Laws'][law]['Type']:
+ if dico['Laws'][law]['TransferFunction'] == True:
+ if dico['Laws'][law]['TF_Input'] == '.pow file':
+ z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
+ pathWT = dico['Laws'][law]['File_Name']
+ HH = dico['Laws'][law]['Hub_Height']
+ alpha = dico['Laws'][law]['AlphaWS']
+ PercentLoss = dico['Laws'][law]['Percent_Losses']
+ x_copy[ite][i] = eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
+ # x_copy[ite][i]=x[ite][i]
+ elif dico['Laws'][law]['TF_Input'] == 'tuples list':
+ x_copy[ite][i] = applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
+ else: # ensure values are between 0 and 1
+ Pval = x[ite][i]
+ Pval = min(Pval, 1)
+ Pval = max(Pval, 0)
+ x_copy[ite][i] = Pval
+ else: # law == 'N_1_fromFile'
+ x_copy[ite][i] = int(floor(x[ite][i]))
+ # build the data for data_trigger.csv
+ lenlaw = len(x_copy[0]) - 1 # number of laws
+ xlaw = [] # xlaw does not take the N-1 column of x_copy
+
+ if nN1!=0:
+ for iter in range(len(x)):
+ aa = [] # temporary variable
+ for ii in range(lenlaw):
+ aa.append(x_copy[iter][ii])
+ xlaw.append(aa)
+ else:
+ for iter in range(len(x)):
+ aa = [] # temporary variable
+ for ii in range(lenlaw+1):
+ aa.append(x_copy[iter][ii])
+ xlaw.append(aa)
+
+
+ nameN1 = [] # names of the N-1 elements
+ for N1 in N_1_LINES:
+ nameN1.append(N1)
+ for N1 in N_1_TRANSFORMERS:
+ nameN1.append(N1)
+ for N1 in N_1_MOTORS:
+ nameN1.append(N1)
+ for N1 in N_1_LOADS:
+ nameN1.append(N1)
+ for N1 in N_1_GENERATORS:
+ nameN1.append(N1)
+ matrixN1 = np.zeros((len(x), nN1))
+
+ # build the matrix for the elements drawn from 'N_1_fromFile'
+ for ite in range(len(x)):
+ for i, law in enumerate(LawsList):
+ if law == 'N_1_fromFile':
+ x_copy[ite][i] = int(floor(x[ite][i]))
+ if x_copy[ite][i] < 0:
+ pass
+
+ if x_copy[ite][i] < len(continLines): # the drawn element is a line
+ line_num = int(x_copy[ite][i])
+ line_name = continLines[int(line_num)]
+ for ii, name in enumerate(nameN1):
+ if line_name == name:
+ matrixN1[ite][ii] = 1
+
+ elif x_copy[ite][i] < (len(continLines) + len(continGroups)):
+ group_num = int(x_copy[ite][i]) - len(continLines)
+ group_name = continGroups[int(group_num)]
+ for ii, name in enumerate(nameN1):
+ if group_name == name:
+ matrixN1[ite][ii] = 1
+
+ elif x_copy[ite][i] < (len(continLines) + len(continGroups) + len(continTransfos)):
+ transfo_num = int(x_copy[ite][i]) - len(continLines) - len(continGroups)
+ transfo_name = continTransfos[int(transfo_num)]
+ for ii, name in enumerate(nameN1):
+ if transfo_name == name:
+ matrixN1[ite][ii] = 1
+ elif x_copy[ite][i] < (len(continLines) + len(continGroups) + len(continTransfos) + len(continLoads)):
+ load_num = int(x_copy[ite][i]) - len(continLines) - len(continGroups) - len(continTransfos)
+ load_name = continLoads[int(load_num)]
+ for ii, name in enumerate(nameN1):
+ if load_name == name:
+ matrixN1[ite][ii] = 1
+
+ elif x_copy[ite][i] < (len(continLines) + len(continGroups) + len(continTransfos) + len(
+ continLoads) + len(continMotors)):
+ motor_num = int(x_copy[ite][i]) - len(continLines) - len(continGroups) - len(continTransfos) - len(
+ continLoads)
+ motor_name = continMotors[int(motor_num)]
+ for ii, name in enumerate(nameN1):
+ if motor_name == name:
+ matrixN1[ite][ii] = 1
+ else:
+ pass
+ xchavec = np.column_stack([np.asarray(xlaw), matrixN1])
+ # write data_trigger.csv file for chavecfile characteristic
+ aa = np.asarray(xchavec)
+ bb = np.arange(0, len(xchavec)) + position
+ cc = np.column_stack([bb, aa])
+ np.savetxt('data.csv', cc, delimiter=';', fmt='%10.5f')
+ filer = open('data.csv', 'r')
+ filew = open('data_trigger.csv', 'a')
+ for line in filer:
+ if PFParams['DECIMAL_SEPARATOR'] == ",":
+ text = line.replace('.', ',')
+ text = text.replace(' ', '')
+ else:
+ text = line.replace(' ', '')
+ filew.write(text)
+ filer.close()
+ filew.close()
+ filer = os.path.join(os.getcwd(), 'data.csv')
+ os.remove(filer)
+
+ stop = time.clock(); print('Prepare to run comTask in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+ if sys.platform.startswith("win"): # workaround to avoid error dialogs that can block the program
+ import ctypes
+ SEM_NOGPFAULTERRORBOX = 0x0002
+ ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX);
+ CREATE_NO_WINDOW = 0x08000000
+ subprocess_flags = CREATE_NO_WINDOW
+ else:
+ subprocess_flags = 0
+
+
+ lancer = [dico['Paths']['Python3_path']+'/python.exe', os.path.dirname(os.path.realpath(__file__)) +'/run_in_PFfunction.py']
+ print('before run_in_PFfunction.py')
+ proc1 = subprocess.Popen(lancer,shell=True,creationflags=subprocess_flags)
+ # proc.wait()
+ aa=0
+ while 1:
+ aa += 1
+ print('==========time since start of package================' + str(aa*5)) # elapsed-time counter (seconds)
+
+ final = []
+ for element in os.listdir(dico['doc_base']):
+ if element.endswith('.final'):
+ final.append(element)
+
+
+ if len(final) >= dico['lenpac'] - 2:# assume up to 2 cases may fail to finish
+ if len(final) == dico['lenpac']:
+ comtask_ok = 0 # comtask succeeded
+ else:
+ comtask_ok = 1 # comtask failed: some cases are missing
+ time.sleep(5)
+ if proc1.poll()!=0:
+ var1=subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc1.pid)],stdout=subprocess.PIPE)
+ # proc.kill()
+ break
+ if (proc1.poll()!=None):
+ comtask_ok=0
+ flag_error=1
+ filew = open(os.path.dirname(os.path.realpath(__file__)) + '/canotComtast' + str(position) + '.txt', 'w')
+ filew.write( 'ignore'+ '\n')
+ filew.close()
+ var1 =subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc1.pid)],stdout=subprocess.PIPE)
+ break
+ time.sleep(5)
+ cmd = 'WMIC PROCESS get Caption,Processid'
+ proc2 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+ task = []
+ for line in proc2.stdout:
+ task.append(str(line))
+ # print(str(line))
+ # bb = 0
+ for kk in task:
+ if 'PowerFactory' in kk:
+ var2 =subprocess.call('tskill PowerFactory',stdout=subprocess.PIPE)
+
+ print('terminate run_in_PFfunction.py')
+
+ if comtask_ok == 1:# re-run the simulation of the missing study cases
+
+ final = []
+ for element in os.listdir(dico['doc_base']):
+ if element.endswith('.final'):
+ final.append(element)
+ if len(final) != dico['lenpac']:# check once more whether all cases were simulated
+ filew = open(os.path.dirname(os.path.realpath(__file__))+'/absence'+str(position)+'.txt', 'w')
+ for ite in range(len(x)):
+ name = 'Case_' + str(ite + dico['position']) + '.final'
+ if name not in final:
+ filew.write(str(ite + dico['position']) + '\n')
+ filew.close()
+ print('Run correct_comtask.py now')
+ lancer = [dico['Paths']['Python3_path']+'\python.exe', os.path.dirname(os.path.realpath(__file__)) +'/correct_comtask.py']
+ # time.sleep(20)
+ proc = subprocess.Popen(lancer,creationflags=subprocess_flags)
+ proc.poll()
+ proc.wait()
+ # print(proc.returncode)
+ # print('proc.returncode===============ater correct_comtask')
+ print('after correct_comtask.py')
+ var3 = subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc.pid)], stdout=subprocess.PIPE)
+
+ cmd = 'WMIC PROCESS get Caption,Processid'
+ proc4 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+ task = []
+ for line in proc4.stdout:
+ task.append(str(line))
+ # print(str(line))
+ # bb = 0
+ for kk in task:
+ if 'PowerFactory' in kk:
+ # bb += 1
+ print('!!!!!!!!!!!!!!!! PowerFactory remains After CorrectComtask !!!!!!!!!!!!!!!!!!!')
+ # os.system('tskill ' + 'PowerFactory') # force PowerFactory to close
+ var2 =subprocess.call('tskill PowerFactory',stdout=subprocess.PIPE)
+ # print('====================' + str(bb))
+ stop = time.clock(); print('Run ComTask in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+ var1 = subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc1.pid)], stdout=subprocess.PIPE)
+
+ ##########################################################################################END calcul parallele
+ ##########################################################################################BEGIN traitement donne
+
+ if flag_error==0:
+ if dico['UnitCommitment']:
+ beforeUC = []
+ for element in os.listdir(dico['doc_base']):
+ if element.endswith('.before'):
+ beforeUC.append(element)
+ mm = [0] # extract the numbers used to sort the case names
+ for aa in range(1, len(beforeUC)): # extract the number embedded in each name
+ nn = ''.join(ele for ele in beforeUC[aa] if ele.isdigit())
+ mm.append(int(nn))
+ nn = sorted(mm)
+ aa = []
+ for kk in nn:
+ aa.append(beforeUC[mm.index(kk)])
+ beforeUC = aa # sort names
+ # os.chdir(dico['doc_base'])
+ for case in beforeUC[-len(x):]:
+ with open(case, 'rb') as fichier:
+ mon_depickler = pickle.Unpickler(fichier)
+ y, z, Ymac, indicLS, indicFS, loadShed, fxshnt = mon_depickler.load()
+ nn = ''.join(ele for ele in case if ele.isdigit()) # extract the case number
+ x2 = xlaw[int(nn)-dico['position']].copy()
+ for ii in range(len(matrixN1[int(nn)-dico['position']])):
+ if matrixN1[int(nn)-dico['position']][ii] == 1:
+ x2.append(nameN1[ii])
+ # x2=x_copy[int(nn)]
+ Output_beforeUC.append(z) # append the output
+ Pmachine_beforeUC.append(Ymac)
+ LS_beforeUC.append(indicLS)
+ FS_beforeUC.append(indicFS)
+ LStable_beforeUC.extend(loadShed)
+ FStable_beforeUC.extend(fxshnt)
+ if TStest == 1:
+ MyLogger(x2, y, z, dico['logCSVfilename_UC'][num_pac], timeVect[int(nn)])#ite])
+ else:
+ MyLogger(x2, y, z, dico['logCSVfilename_UC'][num_pac], int(nn)) # for each iteration write in the CSV
+ for file in beforeUC:# delete the pickle files
+ os.remove(file)
+ # print('Show UC in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
+
+ final = []
+ for element in os.listdir(dico['doc_base']):
+ if element.endswith('.final'):
+ final.append(element)
+ mm = [0] # extract the numbers used to sort the case names
+ for aa in range(1, len(final)): # extract the number embedded in each name
+ nn = ''.join(ele for ele in final[aa] if ele.isdigit())
+ mm.append(int(nn))
+ nn = sorted(mm)
+ aa = []
+ for kk in nn:
+ aa.append(final[mm.index(kk)])
+ final = aa # sort names
+ # os.chdir(dico['doc_base'])
+ for case in final[-len(x):]:
+ with open(case, 'rb') as fichier:
+ mon_depickler = pickle.Unpickler(fichier)
+ y, z, Ymac, indicLS, indicFS, loadShed, fxshnt = mon_depickler.load()
+ nn = ''.join(ele for ele in case if ele.isdigit()) # extract the case number
+ x2 = xlaw[int(nn)-dico['position']].copy()
+ for ii in range(len(matrixN1[int(nn)-dico['position']])):
+ if matrixN1[int(nn)-dico['position']][ii] == 1:
+ x2.append(nameN1[ii])
+ # x2 = x_copy[int(nn)-dico['position']]
+ if TStest == 1:
+ MyLogger(x2, y, z, logCSVfilename[num_pac], timeVect[int(nn)])#ite])
+ else:
+ MyLogger(x2, y, z, logCSVfilename[num_pac], int(nn)) # for each iteration write in the CSV
+ Output.append(z) # append the output
+ Pmachine.append(Ymac)
+ LS.append(indicLS)
+ FS.append(indicFS)
+ LStable.extend(loadShed)
+ FStable.extend(fxshnt)
+ for file in final:# delete the pickle files
+ os.remove(file)
+ print(nameN1)
+ ##########################################################################################END traitement donne
+
+ return inputSample, Output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
+
+def create_dist(dico):
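+ """Build the OpenTURNS ComposedDistribution of the inputs: one marginal
+ per activated law, plus an optional discrete N-1 law read from file
+ (assumed uncorrelated with the other laws), tied together by a normal
+ copula derived from the Spearman correlation matrix."""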
+
+ NumLaws = len(dico['Laws']) + int(dico['N_1_fromFile'])
+
+ #Create a correlation matrix as copulas
+ CorrMatrixNames = dico['CorrMatrix']['laws']
+ CorrMatrix = dico['CorrMatrix']['matrix']
+ corr=CorrelationMatrix(NumLaws)#Openturns
+
+ # Create a collection of the marginal distributions
+ collectionMarginals = DistributionCollection(NumLaws)#Openturns
+
+ distributionX = []
+ for i,key in enumerate(CorrMatrixNames):
+ data, [time_serie, time_serie_file] = getUserLaw(dico['Laws'][key])
+ distributionX.append( data )
+ collectionMarginals[i] = Distribution(data)
+
+ #add N_1 components entered as Files
+ if dico['N_1_fromFile']==True:
+ continTuples = []
+ for j in range(len(dico['continVal'])):
+ continTuples.append((dico['continVal'][j],dico['continProb'][j]))
+ data = getUserDefined(continTuples)
+ distributionX.append(data)
+ collectionMarginals[i+1] = Distribution(data)
+ aa = []
+ for bb in CorrMatrixNames:
+ aa.append(bb)
+ aa.append('N_1_fromFile')
+ dico['CorrMatrix']['laws'] = aa
+ CorrMatrixEx = np.hstack((CorrMatrix, np.zeros((NumLaws-1,1)))) #assume no correlation between N-1 and other laws
+ LastLine = np.hstack((np.zeros((1,NumLaws-1)),np.ones((1,1))))
+ CorrMatrixEx = np.vstack((CorrMatrixEx, LastLine))
+ CorrMatrix = CorrMatrixEx
+ (Nrows, Ncols) = np.shape(CorrMatrixEx)
+ else:
+ (Nrows, Ncols) = np.shape(CorrMatrix)
+ for i in range(Nrows):
+ for j in range(Ncols):
+ corr[i,j]=CorrMatrix[i,j]
+
+ corr2= NormalCopula.GetCorrelationFromSpearmanCorrelation(corr)
+ copula=Copula(NormalCopula(corr2))
+ #copula=Copula(NormalCopula(corr))
+
+ # Create the input probability distribution, args are the distributions, the correlation laws
+ inputDistribution = ComposedDistribution(collectionMarginals, copula)
+
+ return inputDistribution
+
+def Calculation(dico,nb_fix,cmd_Path):
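+ """Main driver loop: launch the monitoring subprocess, build the input
+ distribution, then for each package generate a Monte Carlo sample,
+ pickle the data for the PowerFactory workers, run PFFunct, accumulate
+ the results and test convergence until 'stop'; finally write the
+ summary tables and export AllCase.pfd."""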
+ msg = 'run'
+ output1=[]
+ inputSamp1=[]
+ Pmachine1=[]
+ Ind1,Ind2=[],[]
+ LStable = []
+ FStable = []
+ LStable_beforeUC = []
+ FStable_beforeUC = []
+ output_beforeUC = []
+ Pmachine_beforeUC = []
+ t = 0 # package number
+
+
+ p = subprocess.Popen([dico['Paths']['Python3_path']+'\\python.exe', cmd_Path], stdout=subprocess.PIPE) # launch subprocess
+ nbsr = NonBlockingStreamReader(p.stdout) # monitor subprocess stdout
+# if debug:
+# chemin=os.path.abspath(os.path.join(os.getcwd(), '../'))
+# else:
+
+ chemin=os.getcwd()
+ dico['cheminPSEN'] = chemin
+ os.chdir(dico['doc_base']) # to work in correct directory
+
+ flag2 = dico['flag2']
+ inputDistribution = create_dist(dico) # create new distribution
+ RandomGenerator.SetSeed(os.getpid())
+ outputSampleAll = NumericalSample(0,12)
+
+ while msg == 'run':
+
+ stop = time.clock();start=stop;
+
+ t += 1
+ print('Package ' + str(t))
+ # LStable=[]
+ # FStable=[]
+ output=[]
+ inputSample=[]
+ Pmachine=[]
+ # LStable_beforeUC=[]
+ # FStable_beforeUC=[]
+ # output_beforeUC=[]
+ # Pmachine_beforeUC=[]
+
+ myMCE = MonteCarloExperiment(inputDistribution,dico['lenpac']) #create new sample
+ inputSamp = myMCE.generate()
+ dicow = dico.copy()
+ dicow['inputSamp']=inputSamp
+ del dicow['all_inputs_init']
+ del dicow['CorrMatrix']
+ dicow['CorrMatrix'] = {}
+ dicow['CorrMatrix']['laws'] = list(dico['CorrMatrix']['laws'])
+ dicow['CorrMatrix']['matrix'] = dico['CorrMatrix']['matrix']
+
+ with open(chemin + '/PSEN/data_dico', 'wb') as fichier: # save in order to pass the data to the compython process
+ mon_pickler = pickle.Pickler(fichier, protocol=2)
+ mon_pickler.dump(dicow)
+ print(' Entering PFfunction.py')
+ res=PFFunct(dico,inputSamp) #launch PFFunct (parallel power-flow runs)
+ print(' Leaving PFfunction.py')
+ # 0 1 2 3 4 5 6
+ #inputSample, Output, Pmachine, LS, FS, LStable, FStable,
+ # 7 8 9 10 11 12
+ #Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
+ for result in res[1]:
+ outputSampleAll.add(NumericalPoint(result)) #create a Numerical Sample variable
+        if flag2:
+            LS = np.mean(res[3])  # mean load shedding per package
+            FS = np.mean(res[4])  # mean added Mvar per package
+            z = [LS, FS]
+        else:  # criteria on NbeTension and NbeTransit
+            NbeTransit = float(NumericalPoint(1, outputSampleAll.computeMean()[0])[0])  # mean per package
+            NbeTension = float(NumericalPoint(1, outputSampleAll.computeMean()[1])[0])
+            z = [NbeTransit, NbeTension]
+
+
+ inputSample.extend(res[0])
+ LStable.extend(res[5])
+ FStable.extend(res[6])
+ output.extend(res[1])
+ Pmachine.extend(res[2])
+
+ LStable_beforeUC.extend(res[11])
+ FStable_beforeUC.extend(res[12])
+ output_beforeUC.extend(res[7])
+ Pmachine_beforeUC.extend(res[8])
+
+ output1.extend(output)
+ inputSamp1.extend(inputSample)
+ Pmachine1.extend(Pmachine)
+        if msg == 'run':
+            msg, indice1, indice2 = Convergence(dico, int(dico['PFParams']['LS_Q_CONVERGENCE_CRITERIA']), nb_fix, cmd_Path, z, t)  # check convergence
+ Ind1.append(indice1)
+ Ind2.append(indice2)
+ if len(Ind1) == nb_fix:
+ msg = 'stop'
+ if msg == 'stop':
+ p.terminate()
+        appui = nbsr.readline(0.1)  # any output from the monitored subprocess means the user asked to stop
+        if appui:
+            print('Simulation interrupting...')
+            msg = 'stop'
+ dico['position'] += dico['lenpac']
+ stop = time.clock(); start = stop;
+
+    print('All packages terminated; preparing to export the AllCase.pfd file')
+    cmd = 'WMIC PROCESS get Caption,Processid'  # list running processes (Windows)
+    proc2 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+    task = []
+    for line in proc2.stdout:
+        task.append(str(line))
+    for kk in task:
+        if 'PowerFactory' in kk:
+            print('!!!!!!!!!!!!!!!! PowerFactory remains in Calculation !!!!!!!!!!!!!!!!!!!')
+            subprocess.call('tskill PowerFactory', stdout=subprocess.PIPE)  # kill the leftover process
+    time.sleep(5)
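+    # Note (assumption): 'tskill' is not available on every Windows edition;
+    # 'taskkill /IM PowerFactory.exe /F' is the more widely supported equivalent.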
+
+ if dico['UnitCommitment']:
+ f=open(dico['logCSVfilename_UC'][dico['num_pac']],'a')
+ f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
+ f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
+        for i in range(max(len(LStable_beforeUC), len(FStable_beforeUC))):
+            try:
+                f.write('{0};;{1};{2};{3};{4}'.format(LStable_beforeUC[i][0], LStable_beforeUC[i][1],
+                        LStable_beforeUC[i][2], LStable_beforeUC[i][3], LStable_beforeUC[i][4]))
+            except IndexError:  # fewer load-shedding rows than shunt rows
+                f.write(';;;;;')
+            try:
+                f.write(';;;{0};;{1};{2} \n'.format(FStable_beforeUC[i][0], FStable_beforeUC[i][1], FStable_beforeUC[i][2]))
+            except IndexError:  # fewer shunt rows than load-shedding rows
+                f.write('\n')
+ f.write("\n\n")
+ f.close()
+
+    # write summary tables
+ f=open(dico['logCSVfilename'][dico['num_pac']],'a')
+ f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
+ f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
+    for i in range(max(len(LStable), len(FStable))):
+        try:
+            f.write('{0};;{1};{2};{3};{4}'.format(LStable[i][0], LStable[i][1],
+                    LStable[i][2], LStable[i][3], LStable[i][4]))
+        except IndexError:  # fewer load-shedding rows than shunt rows
+            f.write(';;;;;')
+        try:
+            f.write(';;;{0};;{1};{2} \n'.format(FStable[i][0], FStable[i][1], FStable[i][2]))
+        except IndexError:  # fewer shunt rows than load-shedding rows
+            f.write('\n')
+ f.write("\n\n")
+ f.close()
+
+ try:
+ import powerfactory
+ app = powerfactory.GetApplication()
+ user = app.GetCurrentUser()
+ prjs = user.GetContents('*.IntPrj')
+ prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
+ prj = prjs[0]
+ # prj.Activate()
+        ComExp = user.CreateObject('ComPfdExport')  # object used to export the final .pfd file containing all the simulation cases
+ app.SetWriteCacheEnabled(1) # Disable consistency check
+ ComExp.g_objects = [prj] # define the project to be exported
+ ComExp.g_file = os.path.join(dico['doc_base'], "AllCase.pfd")
+ err = ComExp.Execute() # Command starts the export process
+ app.SetWriteCacheEnabled(0) # Enable consistency check
+ print(prj)
+ print(prj.loc_name)
+ ComExp.Delete()
+ prj.Delete()
+        stop = time.clock(); print('  Exported all study cases in ' + str(round(stop - start, 3)) + ' seconds'); start = stop
+    except Exception:
+        pass  # PowerFactory API unavailable or export failed; skip the final export
+    import shutil  # redundant if already imported at module level
+    shutil.copy2(chemin + '/PSEN/data_dico', 'data_dico')  # save the data
+
+    shdfileUC = []
+    for element in os.listdir(os.path.dirname(os.path.realpath(__file__))):
+        if element.endswith('.shdUC'):
+            shdfileUC.append(element)
+    mm = []  # numbers embedded in the file names, used to sort the cases
+    for aa in range(len(shdfileUC)):  # extract the number from each name
+        nn = ''.join(ele for ele in shdfileUC[aa] if ele.isdigit())
+        mm.append(int(nn))
+    nn = sorted(mm)
+    aa = []
+    for kk in nn:
+        aa.append(shdfileUC[mm.index(kk)])
+    shdfileUC = aa  # names sorted by case number
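+    # Equivalent compact sort (sketch, assuming each name embeds a unique number):
+    #   shdfileUC.sort(key=lambda name: int(''.join(ch for ch in name if ch.isdigit())))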
+
+    if len(shdfileUC) > 0:
+        filew = open(os.path.dirname(dico['doc_base']) + '/No_Cost_OPF_convergence_beforeUC' + '.csv', 'w')
+        for aa in range(len(shdfileUC)):  # names are assumed to follow <prefix>_<case>_<status>.shdUC
+            strings = shdfileUC[aa].split('_')
+            filew.write('Case_' + strings[1] + ';' + strings[2].split('.')[0] + '\n')
+        filew.close()
+    for file in shdfileUC:
+        os.remove(os.path.dirname(os.path.realpath(__file__)) + '\\' + file)
+
+    shdfile = []
+    for element in os.listdir(os.path.dirname(os.path.realpath(__file__))):
+        if element.endswith('.shd'):
+            shdfile.append(element)
+    mm = []  # numbers embedded in the file names, used to sort the cases
+    for aa in range(len(shdfile)):  # extract the number from each name
+        nn = ''.join(ele for ele in shdfile[aa] if ele.isdigit())
+        mm.append(int(nn))
+    nn = sorted(mm)
+    aa = []
+    for kk in nn:
+        aa.append(shdfile[mm.index(kk)])
+    shdfile = aa  # names sorted by case number
+
+    if len(shdfile) > 0:
+        filew = open(os.path.dirname(dico['doc_base']) + '/No_Cost_OPF_convergence' + '.csv', 'w')
+        for aa in range(len(shdfile)):  # names are assumed to follow <prefix>_<case>_<status>.shd
+            strings = shdfile[aa].split('_')
+            filew.write('Case_' + strings[1] + ';' + strings[2].split('.')[0] + '\n')
+        filew.close()
+
+    for file in shdfile:  # delete the pickle files
+        os.remove(os.path.dirname(os.path.realpath(__file__)) + '\\' + file)
+
+ return Ind1,Ind2,output1,inputSamp1,Pmachine1
+
+class NonBlockingStreamReader:  # reads a subprocess stdout without blocking the caller
+    # Requires Queue/Empty (queue module) and Thread (threading), assumed to be
+    # imported at the top of this module.
+
+    def __init__(self, stream):
+        '''
+        stream: the stream to read from.
+        Usually a process' stdout or stderr.
+        '''
+        self._s = stream
+        self._q = Queue()
+
+        def _populateQueue(stream, queue):
+            '''
+            Collect lines from 'stream' and put them in 'queue'.
+            '''
+            while True:
+                line = stream.readline()  # one line at a time; read() would block until EOF
+                if line:
+                    queue.put(line)
+                else:
+                    break  # EOF reached: stop the reader thread instead of spinning
+
+        self._t = Thread(target=_populateQueue, args=(self._s, self._q))
+        self._t.daemon = True
+        self._t.start()  # start collecting lines from the stream
+
+ def readline(self, timeout = None):
+ try:
+ return self._q.get(block = timeout is not None,
+ timeout = timeout)
+ except Empty:
+ return None
+
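+# Minimal usage sketch (assumption: 'proc' is a subprocess.Popen opened with
+# stdout=subprocess.PIPE, as in Calculation above):
+#   nbsr = NonBlockingStreamReader(proc.stdout)
+#   line = nbsr.readline(0.1)  # returns None if nothing arrived within 100 ms
+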
+def Convergence(dico, OPF, nb_fix, cmd_Path, z, t):
+    # The running histories must survive between packages, so they are kept as
+    # function attributes and reset at the first package (t == 1). The original
+    # re-created them on every call, which made the standard deviations below
+    # degenerate and referenced the caller's Ind1/Ind2 lists (a NameError).
+    if t == 1:
+        Convergence.LS = []
+        Convergence.FS = []
+        Convergence.MoyCumuLS = []
+        Convergence.MoyCumuFS = []
+        Convergence.NbeTension = []
+        Convergence.NbeTransit = []
+        Convergence.MoyTension = []
+        Convergence.MoyTransit = []
+    msg = 'run'
+    print('Calculating convergence criteria\n')
+    debut = z  # per-package means: [LS, FS] (OPF) or [NbeTransit, NbeTension]
+    if OPF:  # criteria on load shedding and Mvar
+        Convergence.LS.append(debut[0])
+        Convergence.FS.append(debut[1])
+        Convergence.MoyCumuLS.append(np.mean(Convergence.LS[0:t]))
+        Convergence.MoyCumuFS.append(np.mean(Convergence.FS[0:t]))
+
+        if t == 1:
+            indice1 = 1
+            indice2 = 1
+        else:
+            indice1 = np.std(Convergence.MoyCumuLS)  # stop criterion for load shedding
+            indice2 = np.std(Convergence.MoyCumuFS)  # stop criterion for Mvar
+
+        print('indicator Load Shedding= ' + str(indice1) + '; indicator Added Mvar= ' + str(indice2) + '\n')
+
+        if (indice1 < 0.2) and (indice2 < 0.015) and nb_fix == 0:
+            msg = 'stop'
+        elif t == nb_fix:  # fixed number of packages reached (the caller re-checks this too)
+            msg = 'stop'
+    else:
+        Convergence.NbeTransit.append(debut[0])
+        Convergence.NbeTension.append(debut[1])
+        Convergence.MoyTension.append(np.mean(Convergence.NbeTension))
+        Convergence.MoyTransit.append(np.mean(Convergence.NbeTransit))
+
+        if t == 1:
+            indice1 = 1
+            indice2 = 1
+        else:
+            indice1 = np.std(Convergence.MoyTension)  # stop criterion for voltage
+            indice2 = np.std(Convergence.MoyTransit)  # stop criterion for transit
+
+        print('indicator Nbe Tension= ' + str(indice1) + ' indicator Transit= ' + str(indice2) + '\n')
+
+        if (indice1 < 0.01) and (indice2 < 0.01) and nb_fix == 0:
+            msg = 'stop'
+
+    return msg, indice1, indice2
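+# Worked example of the stopping rule (assumption: convergence mode, nb_fix == 0):
+# if the cumulative load-shedding means over three packages are [3.1, 2.9, 3.0] MW,
+# then indice1 = std([3.1, 2.9, 3.0]) ~= 0.082 < 0.2, so the load-shedding estimate
+# has settled; the run stops once the Mvar indicator is also below 0.015.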
--- /dev/null
+# -*- coding: cp1252 -*-
+import sys
+from Tkinter import *
+import os
+
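+# Python 2 variant (Tkinter module, print statement) of the PSEN progress window;
+# a Python 3 port of the same window follows in the next file.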
+
+def maFonction6(event):
+ quitting()
+
+def quitting():
+    can1.delete(proceeding)
+    can1.create_text(200,50,font=('Fixedsys',12),text="If you want to quit, press the button again...")
+    Button(root,text="Stop Simulation",font=("Fixedsys"),command=really_quitting).grid(row=4,column=1,sticky=N,padx=5)
+
+def really_quitting():
+ print 'quitting'
+ root.destroy()
+
+# create an instance of the Tk class and assign it to "root"
+root = Tk()
+root.title("PSEN - Processing...")
+can1=Canvas(root,width=400,height=100,bg="light blue")
+can1.grid(row=0,column=0,rowspan=10)
+
+proceeding=can1.create_text(200,50,font=('Fixedsys',12),text="Processing...")
+
+Button(root,text="Stop Simulation",font=("Fixedsys"),command=quitting).grid(row=4,column=1,sticky=N,padx=5)
+root.bind("<q>", maFonction6) # lettre q
+root.mainloop()
--- /dev/null
+# -*- coding: cp1252 -*-
+import sys
+from tkinter import *
+import os
+
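+# Python 3 port of the progress window above (tkinter module, print function).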
+
+def maFonction6(event):
+ quitting()
+
+def quitting():
+    can1.delete(proceeding)
+    can1.create_text(200,50,font=('Fixedsys',12),text="If you want to quit, press the button again...")
+    Button(root,text="Stop Simulation",font=("Fixedsys"),command=really_quitting).grid(row=4,column=1,sticky=N,padx=5)
+
+def really_quitting():
+ print ('quitting')
+ root.destroy()
+
+# create an instance of the Tk class and assign it to "root"
+root = Tk()
+root.title("PSEN - Processing...")
+can1=Canvas(root,width=400,height=100,bg="light blue")
+can1.grid(row=0,column=0,rowspan=10)
+
+proceeding=can1.create_text(200,50,font=('Fixedsys',12),text="Processing...")
+
+Button(root,text="Stop Simulation",font=("Fixedsys"),command=quitting).grid(row=4,column=1,sticky=N,padx=5)
+root.bind("<q>", maFonction6) # lettre q
+root.mainloop()
+++ /dev/null
-ierr = psspy.add_details_to_opf_log(1)
-ierr = psspy.produce_opf_log_file(1,r'C:\Users\j15773\Documents\GTDosier\PSEN\Versions\PSEN_V14 - ec dispatch\Example\Results\LOG.log')
\ No newline at end of file
+++ /dev/null
-# -*- coding: cp1252 -*-
-#===============================================================================
-# PSEN SCRIPT FOR PROBABILISTIC STUDIES OF ELECTICAL NETWORKS
-#===============================================================================
-from pylab import *
-from math import*
-import os, random, sys,copy,multiprocessing
-import numpy as np
-import time #import gmtime, strftime, sleep
-from array import *
-import PSENconfig #file with Eficas output dictionaries
-from support_functionsPF import *
-import shutil
-import pdb
-import csv
-
-from openturns import * #decommenter apres
-InitializeDispatchGentoP0 = False
-# Debug = False
-Debug = True
-if __name__ == '__main__':
- start_total = time.clock();
- start = time.clock(); #++++++++++++++++++
-
-
- if Debug:
- cmd_Path=os.getcwd()+r'\usrCmdPF.py' #lancement depuis pssewrapper.py
- #cmd_Path=os.getcwd()+'\PSEN\usrCmd.py' #lancement depuis qteficas_psen.py
- else:
- cmd_Path=os.path.join(os.path.dirname(os.path.abspath(__file__)),"usrCmdPF.py")
- ##cmd_Path=os.getcwd()+'\EficasV1\PSEN_Eficas\PSEN\usrCmd.py' #lancement avec le .bat
-#===============================================================================
-# Recuperation donnees utilisateurs - User data
-#===============================================================================
- #extract laws from Eficas Output
- Paths = PSENconfig.Dico['DIRECTORY']
- SimuParams = PSENconfig.Dico['SIMULATION']
- PFParams = PSENconfig.Dico['PF_PARAMETERS']
-
- if 'CORRELATION' in PSENconfig.Dico:#sortir list de lawnames
- LawNames = RemoveListfromString(PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][0])
- Laws = {}
- NonActiveIndices = []
- TSindices = []
- for key in PSENconfig.Dico.keys():
- if key[0:12] == 'DISTRIBUTION':
- shortkey = key[12:]
- if PSENconfig.Dico[key]['Activated']==True: #only take into account laws which are "activated"
- Laws[shortkey]= PSENconfig.Dico[key]
- if Laws[shortkey]['Law']=='PDF_from_file': #read contents of .csv file
- g=open(Laws[shortkey]['FileName'],"r")
- lines=g.readlines()
- g.close()
- Laws[shortkey]['FileContents']=lines
- elif Laws[shortkey]['Law']=='TimeSeries_from_file': #read contents of .csv file
- g=open(Laws[shortkey]['FileName'],"r")
- lines=g.readlines()
- g.close()
- Laws[shortkey]['FileContents']=lines
- if 'CORRELATION' in PSENconfig.Dico:
- TSindices.append(LawNames.index(shortkey))
- if isinstance(Laws[shortkey][Laws[shortkey]['ComponentType']],str):
- Laws[shortkey][Laws[shortkey]['ComponentType']]=[Laws[shortkey][Laws[shortkey]['ComponentType']]] #if only one entry, create list
- if 'TF_Input' in Laws[shortkey]: #If user inputted transfer function
- Laws[shortkey]['TransferFunction']=True
- else:
- Laws[shortkey]['TransferFunction']=False
- else:
- if 'CORRELATION' in PSENconfig.Dico:
- NonActiveIndices.append(LawNames.index(shortkey))
-
- if 'CORRELATION' in PSENconfig.Dico:
- #Treat Correlation Matrix - eliminate non-activated laws
- CorrMatrix0 = {}
- LawNames2 = []
-
- for i, lawname in enumerate(LawNames):
- if i not in NonActiveIndices:
- LawNames2.append(lawname)
- Cmax = PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][1:]
- CMax = []
- for i,c in enumerate(Cmax):
- if i not in NonActiveIndices:
- c = RemoveListfromString(c)
- c = map(float,c)
- c2 = []
- for ind, c_el in enumerate(c):
- if ind not in NonActiveIndices:
- ## c2.append(c_el)
-
- #if time series, don't correlate other laws with the value "1".
- if (ind not in TSindices) and (i not in TSindices):
- c2.append(c_el)
- elif i==ind:
- c2.append(1.)
- else:
- c2.append(0.)
- CMax.append(c2)
-
- CorrMatrix0['matrix'] = np.array(CMax)
- CorrMatrix0['laws'] = LawNames2
-
- else: #acceptable only if all active distributions are time series or if only 1 active distribution
-
- if len(Laws)==1: #create correlation matrix of 1 x 1
- CorrMatrix0 = {}
- CorrMatrix0['matrix'] = np.array([[1]])
- CorrMatrix0['laws'] = Laws.keys()
- else: #>1 law, test if all TS
- allTS=True
- for key in Laws.keys():
- if Laws[key]['Law']!='TimeSeries_from_file':
- allTS=False
- if allTS:
- CorrMatrix0 = {}
- CorrMatrix0['matrix']=np.eye(len(Laws))
- CorrMatrix0['laws']=Laws.keys()
- else:
- print ('Error: Correlation matrix must be defined. Enter 0''s for correlations between laws and time series.')
- sys.exit(1)
-
- #Duplicate Laws for cases where 1 law defined for multiple components and different sampling should be performed per component:
- isDuplicateLaws = False
- for law in list(Laws.keys()):
- if 'One sample per ' in Laws[law]['Sampling']:
- isDuplicateLaws = True
- ComponentType = Laws[law]['ComponentType']
- ComponentList = Laws[law][ComponentType]
- for component in ComponentList:
- lawname = law + "_" + component
- Laws[lawname]=Laws[law].copy() #make a copy of the law
- Laws[lawname][ComponentType]=[component] #apply law to only one component, not whole list
- del Laws[law]
- else: #one sample for all components defined by law
- i = CorrMatrix0['laws'].index(law)
- if CorrMatrix0['matrix'][i][i] != 1:
- print( 'Error: Correlation must be 1 between law and itself for law with same sample for all components. (' + law + ')')
- sys.exit(1)
- #CorrMaxtrix0['matrix'][i][i] = 1
-
- #retreat CorrelationMatrix
- if isDuplicateLaws:
- CorrMatrix = {}
- CorrMatrix['laws']=Laws.keys()
- CorrMatrix['matrix']=np.eye(len(Laws.keys()))
- for x,lawname1 in enumerate(Laws.keys()):
- for i,lawname1_0 in enumerate(CorrMatrix0['laws']):
- if lawname1_0 in lawname1:
- break
- for y, lawname2 in enumerate(Laws.keys()):
- for j,lawname2_0 in enumerate(CorrMatrix0['laws']):
- if lawname2_0 in lawname2:
- break
- if x!=y:
- CorrMatrix['matrix'][x][y] = CorrMatrix0['matrix'][i][j]
- CorrMatrix['matrix'][y][x] = CorrMatrix0['matrix'][j][i]
-
- else:
- CorrMatrix = CorrMatrix0
- #retest for positive definiteness
- if not np.all(np.linalg.eigvals(CorrMatrix['matrix'])>0):
- print ('Error: Correlation matrix is not positive definite.')
- sys.exit(1)
- #execution file name
- exec_file="report.txt"
-
- # Treat Contingency Files enteres as CSVs
- LinesList = []
- GeneratorsList = []
- LoadsList = []
- TransformersList = []
- MotorsList = []
-
- if 'N_1_LINES' in PSENconfig.Dico:
- if PSENconfig.Dico['N_1_LINES']['Activated']==True:
- LinesList = PSENconfig.Dico['N_1_LINES']['Probability']
- if 'N_1_GENERATORS' in PSENconfig.Dico:
- if PSENconfig.Dico['N_1_GENERATORS']['Activated']==True:
- GeneratorsList = PSENconfig.Dico['N_1_GENERATORS']['Probability']
- if 'N_1_LOADS' in PSENconfig.Dico:
- if PSENconfig.Dico['N_1_LOADS']['Activated']==True:
- LoadsList = PSENconfig.Dico['N_1_LOADS']['Probability']
- if 'N_1_TRANSFORMERS' in PSENconfig.Dico:
- if PSENconfig.Dico['N_1_TRANSFORMERS']['Activated']==True:
- TransformersList = PSENconfig.Dico['N_1_TRANSFORMERS']['Probability']
- if 'N_1_MOTORS' in PSENconfig.Dico:
- if PSENconfig.Dico['N_1_MOTORS']['Activated']==True:
- MotorsList = PSENconfig.Dico['N_1_MOTORS']['Probability']
-
- try :
- continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb = config_contingency(LinesList,GeneratorsList,TransformersList,LoadsList,MotorsList)
- except IOError : # Si le fichier n'est pas dans un bon format on traite l'exception
- nb_lines=1
- print ('Error with contingency input file')
- else :
- continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb = config_contingency(LinesList,GeneratorsList,TransformersList,LoadsList,MotorsList)
-
- if len(continVal)>0:
- N_1_fromFile = True
- else:
- N_1_fromFile = False
-
- # Creation variable nom dossier N-1
- if N_1_fromFile == True :
- folderN_1 = '1_'
- else :
- folderN_1 = '_'
-
-
- # Definition des variables pour les series temporelles
-
- time_serie_flag=[]
- time_serie_mat=[]
- time_serie_time=[]
- timeVect = []
- for i,key in enumerate(CorrMatrix['laws']) :
- if Laws[key]['Law']=='TimeSeries_from_file':
- linesTS = Laws[key]['FileContents']
- time_serie = 1 #raise the flag time_serie
- tsm=[]
- tVect=[]
- for j in range (len(linesTS)) :
- try:
- tsm.append(float(commaToPoint(linesTS[j].split(';')[1])))
- tVect.append(linesTS[j].split(';')[0])
- except :
- pass
- time_serie_time.append(tVect)
- time_serie_flag.append(1)
- time_serie_mat.append(tsm)
- else:
- time_serie_flag.append(-1)
- if N_1_fromFile==True:
- time_serie_flag.append(-1)
-
- #find shortest time series column
- try:
- time_serie
- timeVect = time_serie_time[0]
- for index, tV in enumerate(time_serie_time):
- if len(tV) < len(timeVect):
- timeVect = tV
- except NameError:
- pass
-
- #change time Vector into iteration numbers (otherwise difficult for post processing)
- N = len(timeVect)
- timeVect = range(1, N+1)
-
- time_serie_mat=list(zip(*time_serie_mat))
-
- # Probabilistic Study: central dispersion => Monte Carlo or LHS iterations
- if 'NUMBER_PACKAGE' in SimuParams:
- nb_fix = int(SimuParams['NUMBER_PACKAGE'])
- elif 'CONVERGENCE' in SimuParams:
- if SimuParams['CONVERGENCE']==1:
- nb_fix=0
- else:
- nb_fix=100
- print ('\nALERT:\nConvergence not selected, and no number of packages chosen: default number= 100')
- time.sleep(2)
- #Extension name for the folders and files
- day=time.strftime("%Y%m%d", time.gmtime())
- hour=time.strftime("%Hh%Mm%S", time.gmtime())
- # Enregistrement de l'heure de debut de simulation
- f=open(exec_file, 'a')
- start_time=time.clock()
- f.write("Starting time: %f; Monte Carlo Size : %f; " % (start_time, SimuParams["SIZE_PACKAGE"]))
- f.close()
-
- try:
- time_serie
- except NameError:
- num_cores=multiprocessing.cpu_count()-1
- num_cores=1#Valentin
- else:
- num_cores=multiprocessing.cpu_count()
- num_cores=1#Valentin
-
- # Initialize the big folder
- pathBigFolder = Paths['results_folder']+"/N"+folderN_1+day+"_"+hour
- if not os.path.exists(pathBigFolder): os.makedirs(pathBigFolder)
-
- #folder=Paths['results_folder']+"/N"+folderN_1+day #big folder
- for j in range(num_cores):
- # Initialize a folder per core
- pathSmallFolder = pathBigFolder+'\package'+str(j)+"_N"+folderN_1+day+"_"+hour
- if not os.path.exists(pathSmallFolder): os.makedirs(pathSmallFolder)
-
-
- path_save = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
- filew = open('temp1.txt', 'w')
- filew.write(path_save + '\n')# sauvegarder le path de travail
- filew.close()
- stop = time.clock(); print(' Traitement PSENConfig ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
- Python3_path=PSENconfig.Dico['DIRECTORY']['Python3_path']
- lancer = [Python3_path + '/python.exe',os.path.dirname(os.path.realpath(__file__))+ '/read_pfd_wrapper.py'] # changer le chemin de Python3 executable
- proc = subprocess.Popen(lancer)
- proc.wait()
- stop = time.clock(); print('run read_pfd_wrapper.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
-
-
- with open('param_base', 'rb') as fichier:
- mon_depickler = pickle.Unpickler(fichier)
- all_inputs_init= mon_depickler.load()
- os.remove('param_base')
- buses_base=all_inputs_init[0]
- lines_base=all_inputs_init[1]
- trans_base=all_inputs_init[2]
- plants_base=all_inputs_init[3]
- loads_base=all_inputs_init[4]
- shunt_base=all_inputs_init[5]
- motors_base=all_inputs_init[6]
- trans3_base=all_inputs_init[7]
- swshunt_base=all_inputs_init[8]
-
-
-########///////////////////////////////////////////////////////////##########
- # Initialize size output
- sizeY0=len(plants_base) #np.matrix(plants_base).shape[0]
- sizeY1=len(buses_base) #np.matrix(buses_base).shape[0]
- sizeY2=len(lines_base) #np.matrix(lines_base).shape[0]
- sizeY3=len(loads_base) #np.matrix(loads_base).shape[0]
- sizeY4=len(shunt_base) #np.matrix(shunt_base).shape[0]
- sizeY5=len(trans_base) #np.matrix(trans_base).shape[0]
- sizeY6=len(motors_base) #np.matrix(motors_base).shape[0]
- sizeY7=len(trans3_base)
- sizeY8=len(swshunt_base) #np.matrix(shunt_base).shape[0]
- sizeY=[sizeY0,sizeY1,sizeY2,sizeY5,sizeY7,sizeY3,sizeY6,sizeY4,sizeY8]
- sizeOutput=sizeY2
-
- # Initialize the logger : write the headers
- entete = ""
- unit = ""
- for key in CorrMatrix['laws']:
- if Laws[key]['ComponentType']=='Generator':
- if Laws[key]['Type']=='Generator Unavailability':
- entete+="X:genStatus" + key + ";"
- unit += ";"
- else:
- entete+="X:Gen" + key + "(%Pnom);"
- unit += "%Pnom;"
- elif Laws[key]['ComponentType']=='Load':
- if Laws[key]['Type']=='Load Unavailability':
- entete+="X:loadStatus" + key + ";"
- unit += ";"
- else:
- entete+="X:Load" + key + "(p.u.);"
- unit += "p.u.;"
- elif Laws[key]['ComponentType']=='Line':
- entete+="X:lineStatus" + key + ";"
- unit += ";"
- elif Laws[key]['ComponentType']=='Transformer':
- entete+="X:transfoStatus" + key + ";"
- unit += ";"
- elif Laws[key]['ComponentType']=='Motor':
- entete+="X:motorStatus" + key + ";"
- unit += ";"
- if N_1_fromFile==True:
- entete += "X:N-1;"
- unit += "component disconnected;"
- entete2=entete + ";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;Y:GensDisconnected;;"
- if PFParams['ALGORITHM']=='Optimum Power Flow':
- entete += ";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;;"
-
- unit2= unit + ';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;[(bus, id),...];;'
- if PFParams['ALGORITHM']=='Optimum Power Flow':
- unit += ';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;;'
-
- string = "Iteration;;" + entete
- unitstring = "Num;;" + unit
- string2 = "Iteration;;" + entete2
- unitstring2 = "Num;;" + unit2
-
- logCSVfilename=[]
- logCSVfilename_UC=[]
- for i in range(num_cores):
- logCSVfilename.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_"+hour+".csv") # Name of the file : global variable
- logCSVfilename_UC.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_beforeUC_"+hour+".csv") # Name of the file : global variable
- f = open(logCSVfilename[i], "a")
- f2 = open(logCSVfilename_UC[i], "a")
-
- f.write(string)
- f2.write(string2)
-
- # Names of the Output variables with the bus number
- for name in range (sizeY0):
- f.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
- f2.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
- for name in range (sizeY0):
- f.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
- f2.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
- for name in range (sizeY1):
- f.write("Y:VBus"+str(buses_base[name][0])+";")
- f2.write("Y:VBus"+str(buses_base[name][0])+";")
- for name in range (sizeY2):
- f.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- f2.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- for name in range (sizeY2):
- f.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- f2.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- for name in range (sizeY2):
- f.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- f2.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- for name in range (sizeY5):
- f.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- f2.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- for name in range (sizeY5):
- f.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- f2.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- for name in range (sizeY5):
- f.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- f2.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
-
- for name in range (sizeY7):
- f.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- f2.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- for name in range (sizeY7):
- f.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- f2.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- for name in range (sizeY7):
- f.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- f2.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- for name in range (sizeY3):
- f.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
- f2.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
- for name in range (sizeY6):
- f.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- f2.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- for name in range (sizeY6):
- f.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- f2.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- for name in range (sizeY4):
- f.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
- f2.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
- for name in range (sizeY8):
- f.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
- f2.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
- f.write("\n")
- f2.write("\n")
- # Names of the Output variables with the bus names
- f.write(unitstring)
- f2.write(unitstring2)
- for name in range (sizeY0):
- f.write(str(plants_base[name][8]).replace('\n','')+";")
- f2.write(str(plants_base[name][8]).replace('\n','')+";")
- for name in range (sizeY0):
- f.write(str(plants_base[name][8]).replace('\n','')+";")
- f2.write(str(plants_base[name][8]).replace('\n','')+";")
- for name in range (sizeY1):
- f.write(str(buses_base[name][3]).replace("\n",'')+";")
- f2.write(str(buses_base[name][3]).replace("\n",'')+";")
- for name in range (sizeY2):
- f.write(str(lines_base[name][8]).replace("\n",'').replace("-","_")+ " - " +str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY2):
- f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY2):
- f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY5):
- f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY5):
- f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY5):
- f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY7):
- f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY7):
- f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY7):
- f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY3):
- f.write(str(loads_base[name][4]).replace("\n",'')+";")
- f2.write(str(loads_base[name][4]).replace("\n",'')+";")
- for name in range (sizeY6):
- f.write(str(motors_base[name][4]).replace("\n",'')+";")
- f2.write(str(motors_base[name][4]).replace("\n",'')+";")
- for name in range (sizeY6):
- f.write(str(motors_base[name][4]).replace("\n",'')+";")
- f2.write(str(motors_base[name][4]).replace("\n",'')+";")
- for name in range (sizeY4):
- f.write(str(shunt_base[name][3]).replace("\n",'')+";")
- f2.write(str(shunt_base[name][3]).replace("\n",'')+";")
- for name in range (sizeY8):
- f.write(str(swshunt_base[name][3]).replace("\n",'')+";")
- f2.write(str(swshunt_base[name][3]).replace("\n",'')+";")
- f.write("\n")
- f2.write("\n")
- f.close()
- f2.close()
-
- if not PFParams['UNIT_COMMITMENT']:
- for filename in logCSVfilename_UC:
- os.remove(filename)
-
- # Definition of size input/output
- inputDim = len(Laws.keys())+ int(N_1_fromFile)
- outputDim = 12
-
- N_1_LINES = []
- if ('N_1_LINES' in PSENconfig.Dico):
- if PSENconfig.Dico['N_1_LINES']['Activated'] == True:
- for N1 in PSENconfig.Dico['N_1_LINES']['Probability']:
- if N1[1] != 0:
- N_1_LINES.append(N1[0])
- N_1_TRANSFORMERS = []
- if ('N_1_TRANSFORMERS' in PSENconfig.Dico):
- if PSENconfig.Dico['N_1_TRANSFORMERS']['Activated'] == True:
- for N1 in PSENconfig.Dico['N_1_TRANSFORMERS']['Probability']:
- if N1[1] != 0:
- N_1_TRANSFORMERS.append(N1[0])
- N_1_MOTORS = []
- if ('N_1_MOTORS' in PSENconfig.Dico):
- if PSENconfig.Dico['N_1_MOTORS']['Activated'] == True:
- for N1 in PSENconfig.Dico['N_1_MOTORS']['Probability']:
- if N1[1] != 0:
- N_1_MOTORS.append(N1[0])
- N_1_LOADS = []
- if ('N_1_LOADS' in PSENconfig.Dico):
- if PSENconfig.Dico['N_1_LOADS']['Activated'] == True:
- for N1 in PSENconfig.Dico['N_1_LOADS']['Probability']:
- if N1[1] != 0:
- N_1_LOADS.append(N1[0])
- N_1_GENERATORS = []
- if ('N_1_GENERATORS' in PSENconfig.Dico):
- if PSENconfig.Dico['N_1_GENERATORS']['Activated'] == True:
- for N1 in PSENconfig.Dico['N_1_GENERATORS']['Probability']:
- if N1[1] != 0:
- N_1_GENERATORS.append(N1[0])
-
-
- #Create dictionnary for different useful values to use psse function
- dico={'TStest':0,'Xt':[],'sizeY0':sizeY0,'sizeY1':sizeY1,'sizeY2':sizeY2,\
- 'sizeY3':sizeY3,'sizeY4':sizeY4,'sizeY5':sizeY5,'sizeY6':sizeY6,'sizeY7':sizeY7,'sizeY8':sizeY8, 'sizeY':sizeY,\
- 'folder':pathBigFolder,'folderN_1':folderN_1,\
- 'day':day,'hour':hour, 'position':0,'PFParams': PFParams,\
- 'lenpac':SimuParams['SIZE_PACKAGE'],\
- 'num_pac':0,'logCSVfilename':logCSVfilename,'logCSVfilename_UC':logCSVfilename_UC,'Laws':Laws,'CorrMatrix': CorrMatrix,\
- 'Generators':PSENconfig.MachineDico, 'Loads':PSENconfig.LoadDico, 'Motors':PSENconfig.MotorDico,\
- 'Lines':PSENconfig.LineDico, 'Transformers':PSENconfig.TransfoDico,\
- 'doc_base':'','continLines':continLines,'continTransfos':continTransfos,'timeVect':[],\
- 'continGroups':continGroups,'continLoads':continLoads,'continMotors':continMotors,'continVal':continVal,'continProb':continProb,\
- 'N_1_fromFile': N_1_fromFile,'all_inputs_init':all_inputs_init,'N_1_LINES':N_1_LINES, 'N_1_TRANSFORMERS':N_1_TRANSFORMERS,'N_1_MOTORS':N_1_MOTORS,'N_1_LOADS':N_1_LOADS,'N_1_GENERATORS':N_1_GENERATORS,'Paths':Paths}
-
- if PFParams["ALGORITHM"]=="Optimum Power Flow":
- dico['flag2']=int(PFParams['LS_Q_CONVERGENCE_CRITERIA'])
- dico['UnitCommitment']= PFParams['UNIT_COMMITMENT']
- else:
- dico['flag2']=False
- dico['UnitCommitment']=False
-#===============================================================================
-# EXECUTION
-#===============================================================================
- print ("\n\n\n Starting PSEN ")
-
- # inputSamp=[]
- outputSampleAll=NumericalSample(0,12)#initialization
- ymachine=NumericalSample(0,sizeY0)
-
- try :
- time_serie
- print('Time series')
- dico['TStest']=1
- Xt=[]
- for i in range (len(time_serie_mat)) : #as many as there are points in the time serie
-
- Xt0=[]
- n=0
- for j in range (len(time_serie_flag)) : #for each variable
-
- if time_serie_flag[j] == -1 : #if not a time series
- Xt0.append(-1)
- n+=1
- else :
- Xt0.append(time_serie_mat[i][j-n]) #append the element
-
- Xt.append(Xt0)
- dico['Xt']=Xt
- dico['timeVect']=timeVect[0:len(Xt)]
- dico['lenpac']=len(Xt)
- nb_fix = 1
-
-
- except NameError :
- print ('Probabilistic')
-
-
- dico['doc_base'] = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
-
- liste_dico = []
- liste_dico.append(dico.copy())
- os.environ['PATH'] += ';' + dico['doc_base'] # add the path of each directory
- Ind1, Ind2, output, inputSamp, Pmachine=Calculation(liste_dico[0].copy(),nb_fix,cmd_Path)# lancer les calculs OPF
-
-
-# try :
-# time_serie
-# except NameError :
-# print ('Probabilistic')
-# dico['doc_base'] = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
-#
-# liste_dico = []
-# liste_dico.append(dico.copy())
-# os.environ['PATH'] += ';' + dico['doc_base'] # add the path of each directory
-# Ind1, Ind2, output, inputSamp, Pmachine=Calculation(liste_dico[0].copy(),nb_fix,cmd_Path)# lancer les calculs OPF
-#
-#
-# else:
-# print('Time series')
-# dico['TStest']=1
-# Xt=[]
-# for i in range (len(time_serie_mat)) : #as many as there are points in the time serie
-#
-# Xt0=[]
-# n=0
-# for j in range (len(time_serie_flag)) : #for each variable
-#
-# if time_serie_flag[j] == -1 : #if not a time series
-# Xt0.append(-1)
-# n+=1
-# else :
-# Xt0.append(time_serie_mat[i][j-n]) #append the element
-#
-# Xt.append(Xt0)
-#
-# liste_dico=[]
-# ipos=0
-#
-# RandomGenerator.SetSeed(os.getpid())
-# inputDistribution=create_dist(dico)
-# samples=[]
-#
-# dico['doc_base'] = os.path.join(pathBigFolder, 'package0' + "_N" + folderN_1 + day + "_" + hour)
-#
-# dico['Xt']=Xt
-# dico['timeVect']=timeVect[0:len(Xt)]
-## dico['Xt']=Xt[ipos:int(((i+1)*round(float(len(Xt))/float(num_cores))))]
-## dico['timeVect']=timeVect[ipos:int(((i+1)*round(float(len(Xt))/float(num_cores))))]
-## ipos=int(((i+1)*round(float(len(Xt))/float(num_cores))))
-#
-# myMCE = MonteCarloExperiment(inputDistribution,len(dico['Xt']))
-# Samp = myMCE.generate()
-# samples.append(Samp)
-#
-# liste_dico.append(dico.copy()) #append a new dico to the list
-# os.environ['PATH'] += ';' + dico['doc_base'] #add the path of each directory
-#
-# inputSamp, output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC = PFFunct(liste_dico[0].copy(),np.array(samples[0]))
-#
-## for l in range(num_cores):
-## print "launching PACKAGE "+str(l)
-## p= po.apply_async(PSSEFunct,args=(liste_dico[l].copy(),np.array(samples[l]),),\
-## callback=function_callback_psse) #callback function
-
-
-
-#===============================================================================
-# RECUPERATION DONNEES DE SORTIES ET ECRITURE CSV - OUTPUT RETRIEVAL
-#===============================================================================
-
- print( "Finished multiprocessing")
-
- for i in Pmachine:
- ymachine.add(NumericalPoint(i))
- ymachineMean=ymachine.computeMean()
-
- for i in output:
- outputSampleAll.add(NumericalPoint(i))
- outputDim=outputSampleAll.getDimension()
- outputSize=outputSampleAll.getSize()
-
- inputSample=NumericalSample(0,inputDim)
- for i in inputSamp:
- inputSample.add(NumericalPoint(i))
-
- outputSample=NumericalSample(0,outputDim)
- outputSampleMissed=NumericalSample(0,outputDim)
-
- for i in range (outputSize):
- #if outputSampleAll[i,inputDim]==0 :
- if outputSampleAll[i,3]==0 :
- outputSampleMissed.add(outputSampleAll[i])
- else :
- outputSample.add(outputSampleAll[i])
-
- outputDescription=[]
- for i in range (outputDim):
- outputDescription.append("Y"+str(i))
- outputSample.setDescription( outputDescription )
-
- # Get the empirical mean and standard deviations
- empMeanX = inputSample.computeMean()
- empSdX = inputSample.computeStandardDeviationPerComponent()
-
- if int(outputSample.getSize())>0:
- empiricalMean = outputSample.computeMean()
- empiricalSd = outputSample.computeStandardDeviationPerComponent()
- else:
- print ("ALERT: Not a single scenario converged")
- empiricalMean = ["-"]*outputDim
- empiricalSd = ["-"]*outputDim
-
- # Writing
- CSVfilename=pathBigFolder+"\simulation_interestValues"+hour+".csv" # Name of the file : global variable
- f = open(CSVfilename, "a")
- f.write('CASES SIMULATED: '+str(outputSize)+'\n\n')
-
- f.write(';;Mean;Standard deviation\n')
-
- entete=entete.split(';')
- unit=unit.split(';')
-
- for name in range (inputDim+outputDim+sizeY0):
-
- if (name<inputDim):
- f.write(entete[name]+';'+unit[name]+';'+\
- str(empMeanX[name])+';'+str(empSdX[name])+'\n')
- if name==inputDim:
- f.write('\n')
-## f.write('\n'+entete[name]+';'+unit[name]+';'\
-## +str(empiricalMean[name-inputDim])+';'+\
-## str(empiricalSd[name-inputDim])+'\n')
- if (inputDim<name<inputDim+outputDim):
- #pdb.set_trace()
- f.write(entete[name]+';'+unit[name]+';'\
- +str(empiricalMean[name-inputDim-1])+';'+\
- str(empiricalSd[name-inputDim-1])+'\n')
- if name==(inputDim+outputDim):
- f.write("\nY:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
- +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
- str(ymachineMean[name-(inputDim+outputDim)])+"\n")
- if (inputDim+outputDim<name):
- f.write("Y:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
- +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
- str(ymachineMean[name-(inputDim+outputDim)])+"\n")
-
- if (int(PFParams['LS_Q_CONVERGENCE_CRITERIA'])): #if criteria on Load shed and mvar
- f.write('\n\nIndicator Load Shedding=;')
-
- f.write('Indicator Fixed Shunt=;')
-
- else:
- f.write('\n\nIndicator NumVoltage=;')
-
- f.write('Indicator NumTransit=;')
-
- f.write('\n')
- for i in range(len(Ind1)):
- f.write(str(Ind1[i])+';')
- f.write(str(Ind2[i])+'\n')
-
- f.close()
-
- CSVcomplete_filename=pathBigFolder+"\simulationDClog_complete_"+hour+".csv" # Name of the file : global variable
- f=open(CSVcomplete_filename,"a")
-
- # liste_dico2 = []
- # for k,dico in enumerate(liste_dico):
- # package_folder = dico['doc_base']
- # if os.path.isfile(os.path.join(dico['doc_base'],'Case_1.sav')):
- # liste_dico2.append(dico)
- # else:
- # shutil.rmtree(dico['doc_base'])
-
- if dico['TStest']==1: #if Time series, different output file format
- for k,dico in enumerate(liste_dico):
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
- g = open(package_resultsfile,"r")
-
- if k==0:
- f.write(g.read())
- else:
- g_contents = g.read()
- g_contents2 = g_contents.split('\n')
- g_contents_noheaders = '\n'.join(g_contents2[2:])
-## g_contents_noheaders = ''
-## for m in range(2,len(g_contents2)):
-## g_contents_noheaders+=g_contents2[m] + '\n'
- f.write(g_contents_noheaders)
- g.close()
-
- else: #if probabilistic, must treat table output
- for k,dico in enumerate(liste_dico):
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
- g = open(package_resultsfile,"r")
-
- if k==0:
- g_contents=g.read()
- g_headers = g_contents.partition('\n')[0] + "\n"
- g_contents0 = g_contents.partition('\n')[2]
- g_headers += g_contents0.partition('\n')[0] + "\n"
- g_contents_noheaders = g_contents0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- it_num = len(g_iterations.split('\n'))
- g_summarytable = g_contents_noheaders.partition('\n\n')[2]
- f.write(g_headers)
- f.write(g_iterations)
- f.write('\n')
- else:
- g_contents = g.read()
- g_contents_noheaders0 = g_contents.partition('\n')[2]
- g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
- for line in g_summarytable2.split('\n')[2:]:
- if line != '':
- g_summarytable += line
- g_iterations_newnumbers = ""
- for line in g_iterations.split("\n"): #increment iteration numbers
- it_num += 1
- cells=line.split(';')
- cells[0]=str(it_num)
- newline=";".join(cells)+'\n'
- g_iterations_newnumbers+=newline
- f.write(g_iterations_newnumbers)
- g.close()
-
- f.write('\n\n' + g_summarytable) #write summary table at end
-
- f.close()
-
- if PFParams['ALGORITHM']=='Optimum Power Flow':
- if PFParams['UNIT_COMMITMENT']:
- # Write the second csv
- CSVcomplete_filename=pathBigFolder+"\simulationDClog_beforeUC_complete_"+hour+".csv" # Name of the file : global variable
- f=open(CSVcomplete_filename,"a")
- if dico['TStest']==1: #if Time series, different output file format
- for k,dico in enumerate(liste_dico):
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
- g = open(package_resultsfile,"r")
-
- if k==0:
- f.write(g.read())
- else:
- g_contents = g.read()
- g_contents2 = g_contents.split('\n')
- g_contents_noheaders = '\n'.join(g_contents2[2:])
- f.write(g_contents_noheaders)
- g.close()
-
- else: #if probabilistic, must treat table output
- for k,dico in enumerate(liste_dico):
- ExtraNL = False
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
- g = open(package_resultsfile,"r")
-
- if k==0:
- g_contents=g.read()
- g_headers = g_contents.partition('\n')[0] + "\n"
- g_contents0 = g_contents.partition('\n')[2]
- g_headers += g_contents0.partition('\n')[0] + "\n"
- g_contents_noheaders = g_contents0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- g_iterations_split = g_iterations.split('\n')
- if g_iterations_split[-1]=="":
- g_iterations_split = g_iterations_split[0:-1]
- it_num = len(g_iterations_split)
- g_summarytable = g_contents_noheaders.partition('\n\n')[2]
- f.write(g_headers)
- #f.write(g_iterations)
- for line in g_iterations_split:
- f.write(line)
- f.write('\n')
- #f.write('\n')
- else:
- g_contents = g.read()
- g_contents_noheaders0 = g_contents.partition('\n')[2]
- g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- g_iterations_split = g_iterations.split('\n')
- if g_iterations_split[-1]=="":
- g_iterations_split = g_iterations_split[0:-1]
- g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
- for line in g_summarytable2.split('\n')[2:]:
- if line != '':
- g_summarytable += line
- g_iterations_newnumbers = ""
- for line in g_iterations_split: #increment iteration numbers
- it_num += 1
- cells=line.split(';')
- cells[0]=str(it_num)
- newline=";".join(cells)+'\n'
- g_iterations_newnumbers+=newline
- f.write(g_iterations_newnumbers)
- g.close()
-
- f.write('\n\n' + g_summarytable) #write summary table at end
-
- f.close()
-
-
- #convert decimal separator to commas for csv files
- if PFParams['DECIMAL_SEPARATOR']==",":
- csvlist = []
- for path, subdirs, files in os.walk(pathBigFolder):
- for name in files:
- if name.endswith(".csv"):
- csvlist.append(os.path.join(path, name))
- for csvfile in csvlist:
- h = open(csvfile,"r")
- crd = csv.reader(h,delimiter=";")
- csvfiletemp = csvfile[0:-4] + "0" + ".csv"
- g = open(csvfiletemp, "w", newline='\n')
- cwt = csv.writer(g, delimiter=";")
- for row in crd:
- rowwcommas = []
- for item in row:
- try:
- isnum = float(item)+1
- rowwcommas.append(str(item).replace(".",","))
- except:
- rowwcommas.append(item)
- cwt.writerow(rowwcommas)
- h.close()
- g.close()
- os.remove(csvfile)
- shutil.copy2(csvfiletemp, csvfile)
- os.remove(csvfiletemp)
-
- f=open(exec_file,'a')
- stop_time=time.clock()
- stop_time=time.clock()
- f.write("Stop time: %f; Duration: %f; Time per execution: %f; " \
- % (round(stop_time), round(stop_time-start_time), round((stop_time-start_time)/outputSize)))
- f.write("\n\n")
- f.close()
-
- print('\n\nSimulated '+str(outputSize)+' cases in '+ str(round(stop_time-start_time))+\
- ' seconds. Average '+str(round((stop_time-start_time)/outputSize,2))+'s per case.')
-
- nMissed=int(outputSampleMissed.getSize())
-
- print ('\n\n Non-convergence rate is '+str(round(nMissed*100/outputSize,3))\
- +' % ('+str(outputSampleMissed.getSize())+' cases out of '+str(outputSize)+')')
-
- #graphical_out(inputSample, outputSampleAll, inputDim, outputDim, montecarlosize)
-stop_total = time.clock();
-print('run total in '+ str(round(stop_total - start_total, 3)) + ' seconds');
+++ /dev/null
-MachineDico = {'WIND30__Gr1': {'PMIN': 0.0, 'EXNAME': 'WIND30 30.000', 'NAME': 'WIND30', 'NUMBER': 18, 'QMAX': 0.0, 'Q': 0.0, 'P': 20.0, 'QMIN': 0.0, 'ID': '1 ', 'PMAX': 20.0}, 'NDIESELG1__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG1 11.000', 'NAME': 'NDIESELG1', 'NUMBER': 6, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'HYDRO30__Gr1': {'PMIN': 0.0, 'EXNAME': 'HYDRO30 30.000', 'NAME': 'HYDRO30', 'NUMBER': 16, 'QMAX': 24.0, 'Q': 0.0001832990237744525, 'P': 40.0, 'QMIN': 0.0, 'ID': '1 ', 'PMAX': 40.0}, 'SOLAR30__Gr1': {'PMIN': 0.0, 'EXNAME': 'SOLAR30 30.000', 'NAME': 'SOLAR30', 'NUMBER': 19, 'QMAX': 0.0, 'Q': 0.0, 'P': 15.000000953674316, 'QMIN': 0.0, 'ID': '1 ', 'PMAX': 15.000000953674316}, 'NDIESELG3__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG3 11.000', 'NAME': 'NDIESELG3', 'NUMBER': 8, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'NDIESELG2__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG2 11.000', 'NAME': 'NDIESELG2', 'NUMBER': 7, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'NDIESELG4__Gr1': {'PMIN': 0.0, 'EXNAME': 'NDIESELG4 11.000', 'NAME': 'NDIESELG4', 'NUMBER': 9, 'QMAX': 10.235971450805664, 'Q': 0.14257816970348358, 'P': 10.647665023803711, 'QMIN': -7.048243522644043, 'ID': '1 ', 'PMAX': 17.100000381469727}, 'ODIESELG2__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG2 11.000', 'NAME': 'ODIESELG2', 'NUMBER': 2, 'QMAX': 8.220000267028809, 'Q': 3.820113182067871, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}, 'ODIESELG4__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG4 11.000', 'NAME': 'ODIESELG4', 'NUMBER': 4, 'QMAX': 8.220000267028809, 'Q': 3.820113182067871, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}, 'ODIESELG3__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG3 11.000', 'NAME': 'ODIESELG3', 'NUMBER': 3, 'QMAX': 8.220000267028809, 'Q': 3.820113182067871, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}, 'ODIESELG1__Gr1': {'PMIN': 0.0, 'EXNAME': 'ODIESELG1 11.000', 'NAME': 'ODIESELG1', 'NUMBER': 1, 'QMAX': 8.220000267028809, 'Q': 3.8200631141662598, 'P': 4.771888484356168e-07, 'QMIN': -6.849999904632568, 'ID': '1 ', 'PMAX': 13.699999809265137}}
-LoadDico = {'ODIESEL__Lo1': {'EXNAME': 'ODIESEL 30.000', 'NAME': 'ODIESEL', 'NUMBER': 5, 'Q': 14.5, 'P': 30.000001907348633, 'ID': '1 '}, 'CITYB30__Lo1': {'EXNAME': 'CITYB30 30.000', 'NAME': 'CITYB30', 'NUMBER': 12, 'Q': 24.5, 'P': 50.0, 'ID': '1 '}, 'CITYD30__Lo1': {'EXNAME': 'CITYD30 30.000', 'NAME': 'CITYD30', 'NUMBER': 15, 'Q': 7.25, 'P': 15.000000953674316, 'ID': '1 '}, 'CITYC30__Lo1': {'EXNAME': 'CITYC30 30.000', 'NAME': 'CITYC30', 'NUMBER': 14, 'Q': 9.75, 'P': 20.0, 'ID': '1 '}}
-LineDico = {'NDIESEL__HYDRO90__Li1': {'TONAME': 'HYDRO90', 'FROMNUMBER': 10, 'FROMEXNAME': 'NDIESEL 90.000', 'FROMNAME': 'NDIESEL', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '1 '}, 'CITYC90__SOLAR90__Li1': {'TONAME': 'SOLAR90', 'FROMNUMBER': 13, 'FROMEXNAME': 'CITYC90 90.000', 'FROMNAME': 'CITYC90', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '1 '}, 'NDIESEL__CITYB90__Li1': {'TONAME': 'CITYB90', 'FROMNUMBER': 10, 'FROMEXNAME': 'NDIESEL 90.000', 'FROMNAME': 'NDIESEL', 'TOEXNAME': 'CITYB90 90.000', 'TONUMBER': 11, 'ID': '1 '}, 'NDIESEL__CITYB90__Li2': {'TONAME': 'CITYB90', 'FROMNUMBER': 10, 'FROMEXNAME': 'NDIESEL 90.000', 'FROMNAME': 'NDIESEL', 'TOEXNAME': 'CITYB90 90.000', 'TONUMBER': 11, 'ID': '2 '}, 'CITYC90__HYDRO90__Li1': {'TONAME': 'HYDRO90', 'FROMNUMBER': 13, 'FROMEXNAME': 'CITYC90 90.000', 'FROMNAME': 'CITYC90', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '1 '}, 'ODIESEL__JUNCTION30__Li1': {'TONAME': 'JUNCTION30', 'FROMNUMBER': 5, 'FROMEXNAME': 'ODIESEL 30.000', 'FROMNAME': 'ODIESEL', 'TOEXNAME': 'JUNCTION30 30.000', 'TONUMBER': 21, 'ID': '1 '}, 'CITYB90__CITYC90__Li1': {'TONAME': 'CITYC90', 'FROMNUMBER': 11, 'FROMEXNAME': 'CITYB90 90.000', 'FROMNAME': 'CITYB90', 'TOEXNAME': 'CITYC90 90.000', 'TONUMBER': 13, 'ID': '1 '}, 'WIND30__JUNCTION30__Li1': {'TONAME': 'JUNCTION30', 'FROMNUMBER': 18, 'FROMEXNAME': 'WIND30 30.000', 'FROMNAME': 'WIND30', 'TOEXNAME': 'JUNCTION30 30.000', 'TONUMBER': 21, 'ID': '1 '}, 'CITYD30__JUNCTION30__Li1': {'TONAME': 'JUNCTION30', 'FROMNUMBER': 15, 'FROMEXNAME': 'CITYD30 30.000', 'FROMNAME': 'CITYD30', 'TOEXNAME': 'JUNCTION30 30.000', 'TONUMBER': 21, 'ID': '1 '}, 'HYDRO90__SOLAR90__Li1': {'TONAME': 'SOLAR90', 'FROMNUMBER': 17, 'FROMEXNAME': 'HYDRO90 90.000', 'FROMNAME': 'HYDRO90', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '1 '}, 'CITYD30__SOLAR30__Li1': {'TONAME': 'SOLAR30', 'FROMNUMBER': 15, 'FROMEXNAME': 'CITYD30 30.000', 'FROMNAME': 'CITYD30', 'TOEXNAME': 'SOLAR30 30.000', 'TONUMBER': 19, 'ID': '1 '}, 'HYDRO30__WIND30__Li2': {'TONAME': 'WIND30', 'FROMNUMBER': 16, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'WIND30 30.000', 'TONUMBER': 18, 'ID': '2 '}, 'HYDRO30__WIND30__Li1': {'TONAME': 'WIND30', 'FROMNUMBER': 16, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'WIND30 30.000', 'TONUMBER': 18, 'ID': '1 '}}
-TransfoDico = {'ODIESELG2__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 2, '#WIND': 2, 'FROMEXNAME': 'ODIESELG2 11.000', 'FROMNAME': 'ODIESELG2', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}, 'NDIESELG3__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 8, '#WIND': 2, 'FROMEXNAME': 'NDIESELG3 11.000', 'FROMNAME': 'NDIESELG3', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'ODIESEL__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 5, '#WIND': 2, 'FROMEXNAME': 'ODIESEL 30.000', 'FROMNAME': 'ODIESEL', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'SOLAR30__SOLAR90__Tr1': {'TONAME': 'SOLAR90', 'FROMNUMBER': 19, '#WIND': 2, 'FROMEXNAME': 'SOLAR30 30.000', 'FROMNAME': 'SOLAR30', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '1 '}, 'NDIESELG2__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 7, '#WIND': 2, 'FROMEXNAME': 'NDIESELG2 11.000', 'FROMNAME': 'NDIESELG2', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'HYDRO30__HYDRO90__Tr1': {'TONAME': 'HYDRO90', 'FROMNUMBER': 16, '#WIND': 2, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '1 '}, 'CITYC90__CITYC30__Tr1': {'TONAME': 'CITYC30', 'FROMNUMBER': 13, '#WIND': 2, 'FROMEXNAME': 'CITYC90 90.000', 'FROMNAME': 'CITYC90', 'TOEXNAME': 'CITYC30 30.000', 'TONUMBER': 14, 'ID': '1 '}, 'NDIESELG1__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 6, '#WIND': 2, 'FROMEXNAME': 'NDIESELG1 11.000', 'FROMNAME': 'NDIESELG1', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'HYDRO30__HYDRO90__Tr2': {'TONAME': 'HYDRO90', 'FROMNUMBER': 16, '#WIND': 2, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '2 '}, 'CITYB90__CITYB30__Tr1': {'TONAME': 'CITYB30', 'FROMNUMBER': 11, '#WIND': 2, 'FROMEXNAME': 'CITYB90 90.000', 'FROMNAME': 'CITYB90', 'TOEXNAME': 'CITYB30 30.000', 'TONUMBER': 12, 'ID': '1 '}, 'CITYB90__CITYB30__Tr2': {'TONAME': 'CITYB30', 'FROMNUMBER': 11, '#WIND': 2, 'FROMEXNAME': 'CITYB90 90.000', 'FROMNAME': 'CITYB90', 'TOEXNAME': 'CITYB30 30.000', 'TONUMBER': 12, 'ID': '2 '}, 'HYDRO30__HYDRO90__Tr3': {'TONAME': 'HYDRO90', 'FROMNUMBER': 16, '#WIND': 2, 'FROMEXNAME': 'HYDRO30 30.000', 'FROMNAME': 'HYDRO30', 'TOEXNAME': 'HYDRO90 90.000', 'TONUMBER': 17, 'ID': '3 '}, 'SOLAR30__SOLAR90__Tr2': {'TONAME': 'SOLAR90', 'FROMNUMBER': 19, '#WIND': 2, 'FROMEXNAME': 'SOLAR30 30.000', 'FROMNAME': 'SOLAR30', 'TOEXNAME': 'SOLAR90 90.000', 'TONUMBER': 20, 'ID': '2 '}, 'ODIESELG3__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 3, '#WIND': 2, 'FROMEXNAME': 'ODIESELG3 11.000', 'FROMNAME': 'ODIESELG3', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}, 'NDIESELG4__NDIESEL__Tr1': {'TONAME': 'NDIESEL', 'FROMNUMBER': 9, '#WIND': 2, 'FROMEXNAME': 'NDIESELG4 11.000', 'FROMNAME': 'NDIESELG4', 'TOEXNAME': 'NDIESEL 90.000', 'TONUMBER': 10, 'ID': '1 '}, 'ODIESELG4__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 4, '#WIND': 2, 'FROMEXNAME': 'ODIESELG4 11.000', 'FROMNAME': 'ODIESELG4', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}, 'ODIESELG1__ODIESEL__Tr1': {'TONAME': 'ODIESEL', 'FROMNUMBER': 1, '#WIND': 2, 'FROMEXNAME': 'ODIESELG1 11.000', 'FROMNAME': 'ODIESELG1', 'TOEXNAME': 'ODIESEL 30.000', 'TONUMBER': 5, 'ID': '1 '}}
-MotorDico = {}
-
-Dico ={'DIRECTORY': {'PSSPY_path': 'C:\\Program Files (x86)\\PTI\\PSSE34\\PSSPY27', 'PSSE_path': 'C:\\Program Files (x86)\\PTI\\PSSE34\\PSSBIN', 'sav_file': 'X:/Small Grid PSSE/TestIsland_2015_OPF - Areas.sav', 'results_folder': 'X:/Small Grid PSSE/Results'}, 'PSSE_PARAMETERS': {'UNIT_COMMITMENT': True, 'I_MAX': 'RateA', 'DECIMAL_SEPARATOR': '.', 'FUEL_COST': True, 'ALGORITHM': 'Optimum Power Flow', 'MVAR_COST': False, 'ITERATION_LIMIT': 20, 'SAVE_CASE_BEFORE_UNIT_COMMITMENT': False, 'LOCK_TAPS': True, 'LOADSHEDDING_COST': False}, 'CORRELATION': {'CorrelationMatrix': ["['load']", '[1.0]']}, 'DISTRIBUTIONload': {'Load': ['CITYB30__Lo1', 'CITYC30__Lo1', 'CITYD30__Lo1', 'ODIESEL__Lo1'], 'A': 0.8, 'B': 0.9, 'Activated': True, 'Sampling': 'Same sample for all loads', 'ComponentType': 'Load', 'Law': 'Uniform', 'Type': 'Load Level'}, 'SIMULATION': {'NUMBER_PACKAGE': 1, 'SIZE_PACKAGE': 10}}
\ No newline at end of file
+++ /dev/null
-# -*- coding: cp1252 -*-
-#===============================================================================
-# PSEN SCRIPT FOR PROBABILISTIC STUDIES OF ELECTRICAL NETWORKS
-#===============================================================================
-from openturns import *
-from pylab import *
-from math import *
-import os, random, sys
-import numpy as np
-import time #import gmtime, strftime, sleep
-from array import *
-
-from support_functions import *
-import pdb
-import multiprocessing
-import copy
-import PSENconfig #file with Eficas output dictionaries
-import shutil
-import csv
-
-InitializeDispatchGentoP0 = False
-Debug = True # for testing
-## =============================================================================================
-def function_callback(result): #define callback for a probabilistic study
- output.extend(result[0])
- inputSamp.extend(result[1])
- Pmachine.extend(result[2])
-## =============================================================================================
-def callback_indices(indices): #define callback function for probabilistic study
- Ind1.extend(indices[0])
- Ind2.extend(indices[1])
-## =============================================================================================
-def function_callback_psse(result): #define callback function for time study
- #print(result)
- output.extend(result[1])
- inputSamp.extend(result[0])#5])
- Pmachine.extend(result[2])#6])
-
-def log(filename, text):
- f=open(filename, 'a')
- f.write(text)
- f.close()
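-## Illustrative usage of the log() helper above; "report.txt" is the file name
-## used in the main block below (a sketch, not part of the original run):
-# log("report.txt", "Checkpoint at %f; " % time.clock())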
-
-## =============================================================================================
-def init_PSSEWrapper():
- sys.path.append(PSENconfig.Dico['DIRECTORY']['PSSE_path'])
- os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PSSE_path'] + ";"+ os.environ['PATH']
-
- if Debug:
-        cmd_Path=os.getcwd()+'\usrCmd.py' #launched from pssewrapper.py
-        #cmd_Path=os.getcwd()+'\PSEN\usrCmd.py' #launched from qteficas_psen.py
-    else:
-        cmd_Path=os.path.join(os.path.dirname(os.path.abspath(__file__)),"usrCmd.py")
-        ##cmd_Path=os.getcwd()+'\EficasV1\PSEN_Eficas\PSEN\usrCmd.py' #launched with the .bat
- return cmd_Path
-## =============================================================================================
-def init_PSSE(Paths):
-    ## Initialization of PSSE
- import psspy
- import pssarrays
- import redirect
- _i=psspy.getdefaultint()
- _f=psspy.getdefaultreal()
- _s=psspy.getdefaultchar()
- redirect.psse2py()
- psspy.psseinit(80000)
-
- # Silent execution of PSSe
- islct=6 # 6=no output; 1=standard
- psspy.progress_output(islct)
-
- #read sav
- psspy.case(Paths['sav_file'])
- all_inputs_init=read_sav(Paths['sav_file'])
-
-## reminder of the structure of item[], which holds the network elements
-# plants = all_inputs_init[3]
-# for item in plants:
-# bus = item[0]
-# status = item[1]
-# _id = item[2]
-# pgen = item[3]
-# qgen = item[4]
-# mvabase = item [5]
-# pmax = item[6]
-# qmax = item[7]
-# name = item[8]
-
- if Debug:
- print("all_inputs_init[][] = generateurs ", " init_PSSE")
- for item in all_inputs_init[3]:
- print(item[8])
- return all_inputs_init
-
-## =============================================================================================
-def read_PSENconfig():
- """"
- Read the file PSENconfig
- PSENconfig contains all the information about the element in the network and the user configuration
- """
- Paths = PSENconfig.Dico['DIRECTORY']
- SimuParams = PSENconfig.Dico['SIMULATION']
- PSSEParams = PSENconfig.Dico['PSSE_PARAMETERS']
-
- # Probabilistic Study: central dispersion => Monte Carlo or LHS iterations
- if SimuParams.has_key('NUMBER_PACKAGE'):
- nb_fix = int(SimuParams['NUMBER_PACKAGE'])
- elif SimuParams.has_key('CONVERGENCE'):
- if SimuParams['CONVERGENCE']==1:
- nb_fix=0
- else:
- nb_fix=100
-            print '\nALERT:\nConvergence not selected, and no number of packages chosen: default number = 100'
- time.sleep(2)
-
-    # LOADING OF PSSE
-#    pssFolder=str(Paths['PSSE_path']) ### does not seem to be used
- os.environ['PATH'] += ';' + Paths['results_folder']
- os.chdir(Paths['results_folder'])
-
- if Debug:
- print(Paths, SimuParams, PSSEParams, nb_fix, " Paths, SimuParams, PSSEParams, nb_fix", " read_PSENconfig()")
-
- return Paths, SimuParams, PSSEParams, nb_fix
-
-## =============================================================================================
-#### TEST TO DO: create two different PSENconfig files, one with a correlation matrix and one without, to check that "Laws" is built correctly
-def read_laws():
- """
-    if the law is PDF_from_file or TimeSeries_from_file, read the data contained in the associated .csv file
-    and update the dictionary entry Laws[shortkey]['FileContents']
-
-    TODO: evolve this function so that all laws are treated the same way
-    """
-    ## if the correlation matrix exists, read its header and build a list of law names
- if PSENconfig.Dico.has_key('CORRELATION'):
-        LawNames = RemoveListfromString(PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][0]) ## RemoveListfromString is defined in support_functions
-    Laws = {} ## holds all the distributions (copied from PSENconfig)
-    NonActiveIndices = [] ## indices (in LawNames) of the laws that are not activated
-    TSindices = [] ## indices (in LawNames) of the time-series laws
- for key in PSENconfig.Dico.keys():
- if key[0:12] == 'DISTRIBUTION':
- shortkey = key[12:]
- if PSENconfig.Dico[key]['Activated']==True: #only take into account laws which are "activated"
- Laws[shortkey]= PSENconfig.Dico[key]
- if Laws[shortkey]['Law']=='PDF_from_file': #read contents of .csv file
- g=open(Laws[shortkey]['FileName'],"r")
- lines=g.readlines()
- g.close()
- Laws[shortkey]['FileContents']=lines
- elif Laws[shortkey]['Law']=='TimeSeries_from_file': #read contents of .csv file
- g=open(Laws[shortkey]['FileName'],"r")
- lines=g.readlines()
- g.close()
- Laws[shortkey]['FileContents']=lines
- if PSENconfig.Dico.has_key('CORRELATION'):
- TSindices.append(LawNames.index(shortkey))
- if Laws[shortkey].has_key(Laws[shortkey]['ComponentType']):
- if isinstance(Laws[shortkey][Laws[shortkey]['ComponentType']],str):
- Laws[shortkey][Laws[shortkey]['ComponentType']]=[Laws[shortkey][Laws[shortkey]['ComponentType']]] #if only one entry, create list
- if Laws[shortkey]['ComponentType']=='Reserve Constraint':
- Laws[shortkey]['Type']='Reserve Constraint'
- if Laws[shortkey].has_key('TF_Input'): #If user inputted transfer function
- Laws[shortkey]['TransferFunction']=True
- else:
- Laws[shortkey]['TransferFunction']=False
- else:
- if PSENconfig.Dico.has_key('CORRELATION'):
- NonActiveIndices.append(LawNames.index(shortkey))
- if Debug:
- print(Laws, TSindices, NonActiveIndices, LawNames)
-
- return Laws, TSindices, NonActiveIndices, LawNames
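-## Illustrative sketch of what read_laws() returns for the example Dico above
-## (a single activated Uniform law on four loads); the entries are copied from
-## PSENconfig, so fields beyond these may also appear:
-# Laws = {'load': {'ComponentType': 'Load', 'Law': 'Uniform', 'A': 0.8, 'B': 0.9,
-#                  'Load': ['CITYB30__Lo1', 'CITYC30__Lo1', 'CITYD30__Lo1', 'ODIESEL__Lo1'],
-#                  'Type': 'Load Level', 'Activated': True, 'TransferFunction': False}}
-# TSindices = []; NonActiveIndices = []; LawNames = ['load']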
-## =============================================================================================
-
-def read_or_create_corrmatrix(LawNames, NonActiveIndices, TSindices):
- if PSENconfig.Dico.has_key('CORRELATION'):
- #Treat Correlation Matrix - eliminate non-activated laws
- CorrMatrix0 = {}
- LawNames2 = []
-
- for i, lawname in enumerate(LawNames):
- if i not in NonActiveIndices:
- LawNames2.append(lawname)
- Cmax = PSENconfig.Dico['CORRELATION']['CorrelationMatrix'][1:]
- CMax = []
- for i,c in enumerate(Cmax):
- if i not in NonActiveIndices:
- c = RemoveListfromString(c)
- c = map(float,c)
- c2 = []
- for ind, c_el in enumerate(c):
- if ind not in NonActiveIndices:
- #if time series, don't correlate other laws with the value "1".
- if (ind not in TSindices) and (i not in TSindices):
- c2.append(c_el)
- elif i==ind:
- c2.append(1.)
- else:
- c2.append(0.)
- CMax.append(c2)
- CorrMatrix0['matrix'] = np.array(CMax)
- CorrMatrix0['laws'] = LawNames2
-
- else: #acceptable only if all active distributions are time series or if only 1 active distribution
- if len(Laws)==1: #create correlation matrix of 1 x 1
- CorrMatrix0 = {}
- CorrMatrix0['matrix'] = np.array([[1]])
- CorrMatrix0['laws'] = Laws.keys()
- else: #>1 law, test if all TS
- allTS=True
- for key in Laws.keys():
- if Laws[key]['Law']!='TimeSeries_from_file':
- allTS=False
- if allTS:
- CorrMatrix0 = {}
- CorrMatrix0['matrix']=np.eye(len(Laws))
- CorrMatrix0['laws']=Laws.keys()
- else:
-            print 'Error: Correlation matrix must be defined. Enter zeros for correlations between laws and time series.'
- sys.exit(1)
-
- if Debug:
- print(CorrMatrix0, " read_or_create_corrmatrix(LawNames, NonActiveIndices, TSindices)", " CorrMatrix0")
-
- return CorrMatrix0
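-## Worked example with the Dico above, where CorrelationMatrix = ["['load']", '[1.0]']:
-## one activated law and no time series, so the function returns
-# CorrMatrix0 = {'matrix': np.array([[1.0]]), 'laws': ['load']}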
-
-## =============================================================================================
-def contingency():
- """
-    uses the config_contingency() function defined in support_functions.py
- """
-    # Treat Contingency Files entered as CSVs
- LinesList = []
- GeneratorsList = []
- LoadsList = []
- TransformersList = []
- MotorsList = []
-
- if PSENconfig.Dico.has_key('N_1_LINES'):
- if PSENconfig.Dico['N_1_LINES']['Activated']==True:
- LinesList = PSENconfig.Dico['N_1_LINES']['Probability']
- if PSENconfig.Dico.has_key('N_1_GENERATORS'):
- if PSENconfig.Dico['N_1_GENERATORS']['Activated']==True:
- GeneratorsList = PSENconfig.Dico['N_1_GENERATORS']['Probability']
- if PSENconfig.Dico.has_key('N_1_LOADS'):
- if PSENconfig.Dico['N_1_LOADS']['Activated']==True:
- LoadsList = PSENconfig.Dico['N_1_LOADS']['Probability']
- if PSENconfig.Dico.has_key('N_1_TRANSFORMERS'):
- if PSENconfig.Dico['N_1_TRANSFORMERS']['Activated']==True:
- TransformersList = PSENconfig.Dico['N_1_TRANSFORMERS']['Probability']
- if PSENconfig.Dico.has_key('N_1_MOTORS'):
- if PSENconfig.Dico['N_1_MOTORS']['Activated']==True:
- MotorsList = PSENconfig.Dico['N_1_MOTORS']['Probability']
-
-    try :
-        continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb = config_contingency(LinesList,GeneratorsList,TransformersList,LoadsList,MotorsList)
-    except IOError : # if the file is not in the expected format, handle the exception
-        print 'Error with contingency input file'
-        sys.exit(1)
-
- if len(continVal)>0:
- N_1_fromFile = True
- else:
- N_1_fromFile = False
-
-    # Create the variable holding the N-1 folder name
- if N_1_fromFile == True :
- folderN_1 = '1_'
- else :
- folderN_1 = '_'
-
- if Debug:
- print(continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1, " continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1", " fonction : contingency()")
- return continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1
-## ===============================================================================================
-
-def TS(CorrMatrix):
-    # Define the variables for the time series
-    # TODO: move to pandas?
-
- time_serie_flag=[]
- time_serie_mat=[]
- time_serie_time=[]
- timeVect = []
- for i,key in enumerate(CorrMatrix['laws']) :
- if Laws[key]['Law']=='TimeSeries_from_file':
- linesTS = Laws[key]['FileContents']
- time_serie = 1 #raise the flag time_serie
- tsm=[]
- tVect=[]
- for j in range (len(linesTS)) :
- try:
- tsm.append(float(commaToPoint(linesTS[j].split(';')[1])))
- tVect.append(linesTS[j].split(';')[0])
-                except : # skip header lines and malformed rows
-                    pass
- time_serie_time.append(tVect)
- time_serie_flag.append(1)
- time_serie_mat.append(tsm)
- else:
- time_serie_flag.append(-1)
- if N_1_fromFile==True:
- time_serie_flag.append(-1)
-
- #find shortest time series column
- try:
- time_serie
- timeVect = time_serie_time[0]
- for index, tV in enumerate(time_serie_time):
- if len(tV) < len(timeVect):
- timeVect = tV
- except NameError:
- pass
-
- #change time Vector into iteration numbers (otherwise difficult for post processing)
- N = len(timeVect)
- timeVect = range(1, N+1)
-
- time_serie_mat=zip(*time_serie_mat)
-
- if Debug:
- print(time_serie_flag, time_serie_mat, time_serie_time, timeVect, " time_serie_flag, time_serie_mat, time_serie_time, timeVect", " fonction TS()")
-
- return time_serie_flag, time_serie_mat, time_serie_time, timeVect
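-## Note on the zip(*time_serie_mat) above: it transposes the per-law series into
-## per-instant rows, e.g. two laws [1, 2, 3] and [10, 20, 30] become
-# [(1, 10), (2, 20), (3, 30)]   # one tuple per time step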
-## ===============================================================================================
-
-
-
-""" DEBUT DU MAIN """
-
-if __name__ == '__main__':
-
- cmd_Path = init_PSSEWrapper()
- Paths, SimuParams, PSSEParams, nb_fix = read_PSENconfig()
- all_inputs_init = init_PSSE(Paths)
- log("report.txt", "Starting time: %f; Monte Carlo Size : %f; " % (time.clock(), SimuParams["SIZE_PACKAGE"]))
-
- Laws, TSindices, NonActiveIndices, LawNames = read_laws()
- CorrMatrix = read_or_create_corrmatrix(LawNames, NonActiveIndices, TSindices)
-
- continLines, continGroups, continTransfos, continLoads, continMotors, continVal, continProb, N_1_fromFile, folderN_1 = contingency()
-
- time_serie_flag, time_serie_mat, time_serie_time, timeVect = TS(CorrMatrix)
-
-
-    exit() # the script currently stops here: the code below is the pre-refactoring main body
-
-
-
-    ## OPF configuration in PSSE
- if PSSEParams['ALGORITHM']=='Optimum Power Flow': #run OPF so that adjustable bus shunts are included
- psspy.produce_opf_log_file(1,r"""DETAIL""")
- TapChange = 1-int(PSSEParams['LOCK_TAPS']) #0 if locked, 1 if stepping
- psspy.opf_fix_tap_ratios(1-TapChange) #0 : do not fix transformer tap ratios
-        psspy.report_output(6,"",[0,0]) #6=no output
- psspy.minimize_fuel_cost(int(PSSEParams['FUEL_COST']))
- psspy.minimize_adj_bus_shunts(int(PSSEParams['MVAR_COST']))
- psspy.minimize_load_adjustments(int(PSSEParams['LOADSHEDDING_COST']))
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
-
- #access OPF data
- allbus=1
- include = [1,1,1,1] #isolated buses, out of service branches, subsystem data, subsystem tie lines
- out = 0 #out to file, not window
- # if psspy.bsysisdef(0):
- # sid = 0
- # else: # Select subsytem with all buses
- # sid = -1
- sid = 3
- RopFile = Paths['sav_file'][0:-4]+'.rop'
- AlreadyRop = os.path.isfile(RopFile)
- if not AlreadyRop:
- ierr = psspy.rwop(sid,allbus,include,out,RopFile) #write rop file
- GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(RopFile)
- if PSSEParams['UNIT_COMMITMENT']:
- if PSSEParams.has_key('SpinningReserveID'):
- PSSEParams['SpinningReserveID_1']= PSSEParams['SpinningReserveID']
- del PSSEParams['SpinningReserveID']
- for num in range(1,16):
- keyname = 'SpinningReserveID_' + str(int(num))
- if PSSEParams.has_key(keyname):
- ReserveID = PSSEParams[keyname]
- ReserveFound = False
- ReserveActive=False
- for PRD in PeriodReserveData:
- if PRD[0] == ReserveID:
- ReserveFound=True
- ReserveActive=PRD[3]
- if not ReserveFound:
-                    print 'ALERT: ReserveID ', str(ReserveID), ' is not found. User must define period reserve in .sav file before including a distribution on the reserve constraint in PSEN.'
- if not ReserveActive:
- print 'ALERT: Spinning Reserve Correction entered in PSEN, but ReserveID ', str(ReserveID), ' is not activated in PSS/E.'
- else:
- pass
-        psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(Paths['sav_file'],all_inputs_init,AdjLoadTables,init_gen=True,init_bus=True,init_fxshnt=True,init_swshnt=True,init_load=True,init_P0=InitializeDispatchGentoP0)
- #print "OPF run"
-
- all_inputs_after_OPF = read_sav(Paths['sav_file'])
-
-## do we copy these?
- buses_base=all_inputs_after_OPF[0]
- lines_base=all_inputs_after_OPF[1]
- trans_base=all_inputs_after_OPF[2]
- plants_base=all_inputs_after_OPF[3]
- loads_base=all_inputs_after_OPF[4]
- shunt_base=all_inputs_after_OPF[5]
- motors_base=all_inputs_after_OPF[6]
- trans3_base=all_inputs_after_OPF[7]
- swshunt_base=all_inputs_after_OPF[8]
-
-    ## TODO: move to pandas
- # Initialize size output
- sizeY0=len(plants_base) #np.matrix(plants_base).shape[0]
- sizeY1=len(buses_base) #np.matrix(buses_base).shape[0]
- sizeY2=len(lines_base) #np.matrix(lines_base).shape[0]
- sizeY3=len(loads_base) #np.matrix(loads_base).shape[0]
- sizeY4=len(shunt_base) #np.matrix(shunt_base).shape[0]
- sizeY5=len(trans_base) #np.matrix(trans_base).shape[0]
- sizeY6=len(motors_base) #np.matrix(motors_base).shape[0]
- sizeY7=len(trans3_base)
- sizeY8=len(swshunt_base) #np.matrix(shunt_base).shape[0]
- sizeY=[sizeY0,sizeY1,sizeY2,sizeY5,sizeY7,sizeY3,sizeY6,sizeY4,sizeY8]
- sizeOutput=sizeY2
-
- #####################################################################################
-    ## TODO: move into the "parameter reading" part
- if SimuParams.has_key('MAX_CORES'):
- max_cores = SimuParams['MAX_CORES']
- else:
- max_cores = multiprocessing.cpu_count()
-
- try:
- time_serie
- except NameError: #probabilistic
- if max_cores==1:
- print('Must use at least 2 cores for probabilistic simulation. MAX_CORES parameter set to 2.')
- max_cores=2
- num_cores=min(min(multiprocessing.cpu_count(),max_cores)-1, nb_fix) #Num cores
-
- print('Number of cores used: ' + str(num_cores + 1))
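-        ## Illustrative check with the example Dico above (NUMBER_PACKAGE = 1) on an
-        ## 8-core machine: num_cores = min(min(8, 8) - 1, 1) = 1 worker package, plus
-        ## the Convergence process launched alongside it, hence the "+ 1" printed above.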
-    ## to test (a priori the else branch is never entered, so multiprocessing cannot be avoided)
-    ## the else was moved under the if: this should work
- else:
- num_cores=min(multiprocessing.cpu_count(),max_cores)
- NoMultiProcTS=False
- if num_cores==1:
- NoMultiProcTS = True
- if Debug==True:
- NoMultiProcTS = True
- print('Number of cores used: ' + str(num_cores))
-
-
- #Extension name for the folders and files
- day=time.strftime("%Y%m%d", time.gmtime())
- hour=time.strftime("%Hh%Mm%S", time.gmtime())
-
- # Initialize the big folder
- pathBigFolder = Paths['results_folder']+"/N"+folderN_1+day+"_"+hour
- if not os.path.exists(pathBigFolder): os.makedirs(pathBigFolder)
-
-
- #folder=Paths['results_folder']+"/N"+folderN_1+day #big folder
- for j in range(num_cores):
- # Initialize a folder per core
- pathSmallFolder = pathBigFolder+'\package'+str(j)+"_N"+folderN_1+day+"_"+hour
- if not os.path.exists(pathSmallFolder): os.makedirs(pathSmallFolder)
- #####################################################################################
-
-
-    ## writing of the output files
-    ## TODO: move to pandas
-
-    ## writing of the headers
- # Initialize the logger : write the headers
- entete = ""
- unit = ""
- for key in CorrMatrix['laws']:
- if Laws[key]['ComponentType']=='Generator':
- if Laws[key]['Type']=='Generator Availability':
- entete+="X:genStatus" + key + ";"
- unit += ";"
- else:
- entete+="X:Gen" + key + "(%Pnom);"
- unit += "%Pnom;"
- elif Laws[key]['ComponentType']=='Load':
- if Laws[key]['Type']=='Load Availability':
- entete+="X:loadStatus" + key + ";"
- unit += ";"
- else:
- entete+="X:Load" + key + "(p.u.);"
- unit += "p.u.;"
- elif Laws[key]['ComponentType']=='Line':
- entete+="X:lineStatus" + key + ";"
- unit += ";"
- elif Laws[key]['ComponentType']=='Transformer':
- entete+="X:transfoStatus" + key + ";"
- unit += ";"
- elif Laws[key]['ComponentType']=='Motor':
- entete+="X:motorStatus" + key + ";"
- unit += ";"
-
- elif Laws[key]['ComponentType']=='Reserve Constraint':
- entete+="X:Reserve" + key + ";"
- unit += "MW;"
-
- if N_1_fromFile==True:
- entete += "X:N-1;"
- unit += "component disconnected;"
- entete2=entete + ";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;Y:GensDisconnected;;"
- if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
- entete+=";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;Y:PlimitSwing;Y:QlimitSwing;;"
- else:
- entete+=";Y:NumTransitLine;Y:NumTransitTr;Y:NumVoltage;Y:GenTot;Y:LoadTot;Y:%Losses;Y:Max%ALine;Y:Max%ATr;Y:NumTransit_0.9-1Line;Y:NumTransit_0.9-1Tr;Y:AddedMVAR;Y:LoadShedding;;"
-
-
- unit2= unit + ';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;[(bus, id),...];;'
- if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
- unit+=';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;T/F;T/F;;'
- else:
- unit+=';Num;Num;Num;MW;MW;%;%;%;Num;Num;MVAR;MW;;'
- string = "Iteration;;" + entete
- unitstring = "Num;;" + unit
- string2 = "Iteration;;" + entete2
- unitstring2 = "Num;;" + unit2
-
- logCSVfilename=[]
- logCSVfilename_UC=[]
-    ## note: an output file is written in each subfolder
- for i in range(num_cores):
- logCSVfilename.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_"+hour+".csv") # Name of the file : global variable
- logCSVfilename_UC.append(pathBigFolder+'/package'+str(i)+"_N"+folderN_1+day+ "_" + hour + "/simulationDClog_beforeUC_"+hour+".csv") # Name of the file : global variable
- f = open(logCSVfilename[i], "a")
- f2 = open(logCSVfilename_UC[i], "a")
-
- f.write(string)
- f2.write(string2)
-
- # Names of the Output variables with the bus number
- for name in range (sizeY0):
- f.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
- f2.write("Y:PMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+ ";")
- for name in range (sizeY0):
- f.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
- f2.write("Y:QMachine"+str(plants_base[name][0])+"id"+ str(plants_base[name][2])+";")
- for name in range (sizeY1):
- f.write("Y:VBus"+str(buses_base[name][0])+";")
- f2.write("Y:VBus"+str(buses_base[name][0])+";")
- for name in range (sizeY2):
- f.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- f2.write("Y"+str(name+1)+":%Rate "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- for name in range (sizeY2):
- f.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- f2.write("Y"+str(name+1)+":P "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- for name in range (sizeY2):
- f.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- f2.write("Y"+str(name+1)+":Q "+str(lines_base[name][0])+"-"+str(lines_base[name][1])+" id"+ str(lines_base[name][10])+";")
- for name in range (sizeY5):
- f.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- f2.write("Y"+str(name+1)+":Tr%Rate "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- for name in range (sizeY5):
- f.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- f2.write("Y"+str(name+1)+":TrP "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- for name in range (sizeY5):
- f.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
- f2.write("Y"+str(name+1)+":TrQ "+str(trans_base[name][0])+"-"+str(trans_base[name][1])+" id"+ str(trans_base[name][10]).strip()+";")
-
- for name in range (sizeY7):
- f.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- f2.write("Y"+str(name+1)+":Tr3%Rate "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- for name in range (sizeY7):
- f.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- f2.write("Y"+str(name+1)+":Tr3P "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- for name in range (sizeY7):
- f.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- f2.write("Y"+str(name+1)+":Tr3Q "+str(trans3_base[name][0])+"-"+str(trans3_base[name][1])+"-"+str(trans3_base[name][2])+" id"+ str(trans3_base[name][13]).strip()+ " wnd"+str(trans3_base[name][3])+";")
- for name in range (sizeY3):
- f.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
- f2.write("Y:Load "+str(loads_base[name][0])+" id"+ str(loads_base[name][5])+";")
- for name in range (sizeY6):
- f.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- f2.write("Y:MotorP "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- for name in range (sizeY6):
- f.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- f2.write("Y:MotorQ "+str(motors_base[name][0])+" id"+ str(motors_base[name][5])+";")
- for name in range (sizeY4):
- f.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
- f2.write("Y:Shunt bus "+str(shunt_base[name][0])+" id"+ str(shunt_base[name][5])+";")
- for name in range (sizeY8):
- f.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
- f2.write("Y:Sw shunt bus "+str(swshunt_base[name][0])+";")
- f.write("\n")
- f2.write("\n")
- # Names of the Output variables with the bus names
- f.write(unitstring)
- f2.write(unitstring2)
- for name in range (sizeY0):
- f.write(str(plants_base[name][8]).replace('\n','')+";")
- f2.write(str(plants_base[name][8]).replace('\n','')+";")
- for name in range (sizeY0):
- f.write(str(plants_base[name][8]).replace('\n','')+";")
- f2.write(str(plants_base[name][8]).replace('\n','')+";")
- for name in range (sizeY1):
- f.write(str(buses_base[name][3]).replace("\n",'')+";")
- f2.write(str(buses_base[name][3]).replace("\n",'')+";")
- for name in range (sizeY2):
- f.write(str(lines_base[name][8]).replace("\n",'').replace("-","_")+ " - " +str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY2):
- f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY2):
- f.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(lines_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(lines_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY5):
- f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY5):
- f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY5):
- f.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans_base[name][8]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans_base[name][9]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY7):
- f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY7):
- f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY7):
- f.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- f2.write(str(trans3_base[name][10]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][11]).replace("\n",'').replace(" - "," _ ")+" - "+str(trans3_base[name][12]).replace("\n",'').replace(" - "," _ ")+";")
- for name in range (sizeY3):
- f.write(str(loads_base[name][4]).replace("\n",'')+";")
- f2.write(str(loads_base[name][4]).replace("\n",'')+";")
- for name in range (sizeY6):
- f.write(str(motors_base[name][4]).replace("\n",'')+";")
- f2.write(str(motors_base[name][4]).replace("\n",'')+";")
- for name in range (sizeY6):
- f.write(str(motors_base[name][4]).replace("\n",'')+";")
- f2.write(str(motors_base[name][4]).replace("\n",'')+";")
- for name in range (sizeY4):
- f.write(str(shunt_base[name][3]).replace("\n",'')+";")
- f2.write(str(shunt_base[name][3]).replace("\n",'')+";")
- for name in range (sizeY8):
- f.write(str(swshunt_base[name][3]).replace("\n",'')+";")
- f2.write(str(swshunt_base[name][3]).replace("\n",'')+";")
- f.write("\n")
- f2.write("\n")
- f.close()
- f2.close()
-
-    ## TODO: do this test before writing the two files
- if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
- PSSEParams['MVAR_COST'] = False
- for filename in logCSVfilename_UC:
- os.remove(filename)
- else:
- if not PSSEParams['UNIT_COMMITMENT']:
- for filename in logCSVfilename_UC:
- os.remove(filename)
-
-
- # Definition of size input/output
- inputDim = len(Laws.keys())+ int(N_1_fromFile)
- outputDim = 12 + 2*int(PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow')
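-    ## Worked example with the Dico above: one activated law and no N-1 file gives
-    ## inputDim = 1 + 0 = 1; the 'Optimum Power Flow' algorithm gives
-    ## outputDim = 12 + 2*0 = 12.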
-
-
-    #Create a dictionary of useful values for the psse functions
-    ## ??
- dico={'TStest':0,'Xt':[],'sizeY0':sizeY0,'sizeY1':sizeY1,'sizeY2':sizeY2,\
- 'sizeY3':sizeY3,'sizeY4':sizeY4,'sizeY5':sizeY5,'sizeY6':sizeY6,'sizeY7':sizeY7,'sizeY8':sizeY8, 'sizeY':sizeY,\
- 'folder':pathBigFolder,'folderN_1':folderN_1,\
- 'day':day,'position':0,'PSSEParams': PSSEParams,\
- '_i':_i,'_f':_f,'_s':_s,'lenpac':SimuParams['SIZE_PACKAGE'],\
- 'num_pac':0,'logCSVfilename':logCSVfilename,'logCSVfilename_UC':logCSVfilename_UC,'Laws':Laws,'CorrMatrix': CorrMatrix,\
- 'Generators':PSENconfig.MachineDico,'Loads':PSENconfig.LoadDico, 'Motors':PSENconfig.MotorDico,\
- 'Lines':PSENconfig.LineDico,'Transformers':PSENconfig.TransfoDico,\
- 'doc_base':'','continLines':continLines,'continTransfos':continTransfos,'timeVect':[],\
- 'continGroups':continGroups,'continLoads':continLoads,'continMotors':continMotors,'continVal':continVal,'continProb':continProb,\
- 'N_1_fromFile': N_1_fromFile,'all_inputs_init':all_inputs_after_OPF, 'AdjLoadTables':AdjLoadTables, 'Paths':Paths}
-
- if PSSEParams["ALGORITHM"]=="Optimum Power Flow":
- dico['flag2']=int(PSSEParams['MVAR_COST'])
- dico['UnitCommitment']= PSSEParams['UNIT_COMMITMENT']
- else:
- dico['flag2']=False
- dico['UnitCommitment']=False
-
-#===============================================================================
-# EXECUTION
-#===============================================================================
-
-
-
- print "\n\n\n Starting PSEN "
-
- inputSamp=[]
-
- outputSampleAll=NumericalSample(0,12 + 2*int(PSSEParams["ALGORITHM"]=="Economic Dispatch and Power Flow"))#initialization
- ymachine=NumericalSample(0,sizeY0)
- output=[]
-
- inputSamp=[]
- LStable=[]
- FStable=[]
- Pmachine=[]
-
- Ind1=[]
- Ind2=[]
-
-
- try :
- time_serie
-
- except NameError :
- print 'Probabilistic'
-
- #create new dico for each process which is going to be launched
- liste_dico=[]
- for i in range(num_cores):
- dico['num_pac']=i
- psspy.case(Paths['sav_file'])
- dico['doc_base']=os.path.join(pathBigFolder,'package'+str(i)+"_N"+folderN_1+day+"_"+hour) #working directory of each package
- psspy.save(os.path.join(dico['doc_base'],"BaseCase.sav" )) #create a initial case for each package
- RopFile = Paths['sav_file'][0:-4]+'.rop'
- RopFile2 = os.path.join(dico['doc_base'],"BaseCase.rop" )
- shutil.copy(RopFile,RopFile2)
-
- liste_dico.append(dico.copy()) #append a new dico to the list
- os.environ['PATH'] += ';' + dico['doc_base'] #add the path of each directory
-
- dico['TStest']=0
- cur_dir=os.getcwd() #get the current directory path
- tmp=sys.stdout #get the stdout path
-
-        #pdb.set_trace()
-
- po=multiprocessing.Pool(maxtasksperchild=1)
- m1=multiprocessing.Manager()
- m2=multiprocessing.Manager()
- data=m1.Queue()
- msg=m2.Queue()
- msg.put('ok')
-
-
- if nb_fix==0 or num_cores < nb_fix :
- print "Convergence criteria or fewer cores than packages to run"
-
- if Debug:
- #res=Convergence(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path)
- res=Calculation(liste_dico[0].copy(),data,msg)
-
- else:
- #either for stop criteria or for a big number of package
- for l in range(num_cores+1):
- if l!=num_cores:
- p= po.apply_async(Calculation,args=(liste_dico[l].copy(),data,msg,),\
- callback=function_callback)
- else:
- p= po.apply_async(Convergence,args=(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path,),\
- callback=callback_indices)
-
- po.close()
- po.join()
-
- elif num_cores>=nb_fix and nb_fix!=0:
- print "Fixed number of packages, fewer packages than cores"
-
- if Debug:
- #res=Convergence(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path)
- res=Calculation(liste_dico[0].copy(),data,msg)
- else:
- #for a small number of packages
- for l in range(nb_fix+1):
-
- if l!=nb_fix:
- p= po.apply_async(Calculation,args=(liste_dico[l].copy(),data,msg,),\
- callback=function_callback)
- else:
- p= po.apply_async(Convergence,args=(data,msg,int(PSSEParams['MVAR_COST']),nb_fix,cmd_Path,),\
- callback=callback_indices)
- po.close()
- po.join()
-
-
- os.chdir(cur_dir) #back to the working directory
- sys.stdout=tmp #back to the shell stdout
-
-
- else:
- print 'Time series'
-
- dico['TStest']=1
- Xt=[]
-        for i in range (len(time_serie_mat)) : #as many as there are points in the time series
-
- Xt0=[]
- n=0
- for j in range (len(time_serie_flag)) : #for each variable
-
- if time_serie_flag[j] == -1 : #if not a time series
- Xt0.append(-1)
- n+=1
- else :
- Xt0.append(time_serie_mat[i][j-n]) #append the element
-
- Xt.append(Xt0)
-
- liste_dico=[]
- ipos=0
-
- RandomGenerator.SetSeed(os.getpid())
- inputDistribution=create_dist(dico)
- samples=[]
-
- #create new dico for each process which is going to be launched
- for i in range(num_cores):
- dico['num_pac']=i
- psspy.case(Paths['sav_file'])
- dico['doc_base']=os.path.join(pathBigFolder,'package'+str(i)+"_N"+folderN_1+day+'_'+hour) #working directory of each package
-
- if i==num_cores-1:
- dico['Xt']=Xt[ipos:len(Xt)]
- dico['timeVect']=timeVect[ipos:len(Xt)]
- else:
- dico['Xt']=Xt[ipos:int(((i+1)*np.ceil(float(len(Xt))/float(num_cores))))]
- dico['timeVect']=timeVect[ipos:int(((i+1)*np.ceil(float(len(Xt))/float(num_cores))))]
-            ipos=int(((i+1)*np.ceil(float(len(Xt))/float(num_cores))))
-
- myMCE = MonteCarloExperiment(inputDistribution,len(dico['Xt']))
- Samp = myMCE.generate()
- samples.append(Samp)
-
- psspy.save(dico['doc_base']+"/BaseCase.sav" ) #create a initial case for each package
- liste_dico.append(dico.copy()) #append a new dico to the list
- os.environ['PATH'] += ';' + dico['doc_base'] #add the path of each directory
-
- cur_dir=os.getcwd() #get the current directory path
- tmp=sys.stdout #get the stdout path
-
-
- if NoMultiProcTS:
- inputSamp, output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC = PSSEFunct(liste_dico[0].copy(),np.array(samples[0]))
-
- else:
- po=multiprocessing.Pool(maxtasksperchild=1) #create a multiprocessing.Pool object
- for l in range(num_cores):
- print "launching PACKAGE "+str(l)
- p= po.apply_async(PSSEFunct,args=(liste_dico[l].copy(),np.array(samples[l]),),\
- callback=function_callback_psse) #callback function
-
- po.close()
- po.join()
-
-# po=multiprocessing.Pool(maxtasksperchild=1) #create a multiprocessing.Pool object
-# results = [ po.apply(PSSEFunct,args=(liste_dico[l].copy(),np.array(samples[l]),)) for l in range(num_cores) ]
-#
-# for result in results:
-# output.extend(result[1])
-# inputSamp.extend(result[0])#5])
-## Pmachine.extend(result[2])#6])
-#
-# po.close()
-# po.join()
-
- os.chdir(cur_dir) #back to the working directory
- sys.stdout=tmp #back to the shell stdout
-
-
-#===============================================================================
-# OUTPUT RETRIEVAL AND CSV WRITING
-#===============================================================================
-
- print "Finished multiprocessing"
-
- for i in Pmachine:
- ymachine.add(NumericalPoint(i))
- ymachineMean=ymachine.computeMean()
-
- for i in output:
- outputSampleAll.add(NumericalPoint(i))
- outputDim=outputSampleAll.getDimension()
- outputSize=outputSampleAll.getSize()
-
- inputSample=NumericalSample(0,inputDim)
- for i in inputSamp:
- inputSample.add(NumericalPoint(i))
-
- outputSample=NumericalSample(0,outputDim)
- outputSampleMissed=NumericalSample(0,outputDim)
-
- for i in range (outputSize):
- #if outputSampleAll[i,inputDim]==0 :
- if outputSampleAll[i,3]==0 :
- outputSampleMissed.add(outputSampleAll[i])
- else :
- outputSample.add(outputSampleAll[i])
-
- outputDescription=[]
- for i in range (outputDim):
- outputDescription.append("Y"+str(i))
- outputSample.setDescription( outputDescription )
-
- # Get the empirical mean and standard deviations
- empMeanX = inputSample.computeMean()
- empSdX = inputSample.computeStandardDeviationPerComponent()
-
- if int(outputSample.getSize())>0:
- empiricalMean = outputSample.computeMean()
- empiricalSd = outputSample.computeStandardDeviationPerComponent()
- else:
- print "ALERT: Not a single scenario converged"
- empiricalMean = ["-"]*outputDim
- empiricalSd = ["-"]*outputDim
-
-
-
- # Writing
- CSVfilename=pathBigFolder+"\simulation_interestValues"+hour+".csv" # Name of the file : global variable
- f = open(CSVfilename, "a")
- f.write('CASES SIMULATED: '+str(outputSize)+'\n\n')
-
- f.write(';;Mean;Standard deviation\n')
-
- entete=entete.split(';')
- unit=unit.split(';')
-
- for name in range (inputDim+outputDim+sizeY0):
-
- if (name<inputDim):
- f.write(entete[name]+';'+unit[name]+';'+\
- str(empMeanX[name])+';'+str(empSdX[name])+'\n')
- if name==inputDim:
- f.write('\n')
-## f.write('\n'+entete[name]+';'+unit[name]+';'\
-## +str(empiricalMean[name-inputDim])+';'+\
-## str(empiricalSd[name-inputDim])+'\n')
- if (inputDim<name<inputDim+outputDim):
- #pdb.set_trace()
- f.write(entete[name]+';'+unit[name]+';'\
- +str(empiricalMean[name-inputDim-1])+';'+\
- str(empiricalSd[name-inputDim-1])+'\n')
- if name==(inputDim+outputDim):
- f.write("\nY:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
- +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
- str(ymachineMean[name-(inputDim+outputDim)])+"\n")
- if (inputDim+outputDim<name):
- f.write("Y:PMachine"+str(plants_base[name-(inputDim+outputDim)][0])+";"\
- +str(plants_base[name-(inputDim+outputDim)][8])+';'+\
- str(ymachineMean[name-(inputDim+outputDim)])+"\n")
-
- if (int(PSSEParams['MVAR_COST'])): #if criteria on Load shed and mvar
- f.write('\n\nIndicator Load Shedding=;')
-
- f.write('Indicator Fixed Shunt=;')
-
- else:
- f.write('\n\nIndicator NumVoltage=;')
-
- f.write('Indicator NumTransit=;')
-
- f.write('\n')
- for i in range(len(Ind1)):
- f.write(str(Ind1[i])+';')
- f.write(str(Ind2[i])+'\n')
-
- f.close()
-
- CSVcomplete_filename=pathBigFolder+"\simulationDClog_complete_"+hour+".csv" # Name of the file : global variable
- f=open(CSVcomplete_filename,"a")
-
- liste_dico2 = []
- for k,dico in enumerate(liste_dico):
- package_folder = dico['doc_base']
- if os.path.isfile(os.path.join(dico['doc_base'],'Case_1.sav')):
- liste_dico2.append(dico)
- else:
- shutil.rmtree(dico['doc_base'])
-
-
-
- if dico['TStest']==1: #if Time series, different output file format
- for k,dico in enumerate(liste_dico2):
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
- g = open(package_resultsfile,"r")
- if k==0:
- f.write(g.read())
- else:
- g_contents = g.read()
- g_contents2 = g_contents.split('\n')
- g_contents_noheaders = '\n'.join(g_contents2[2:])
-## g_contents_noheaders = ''
-## for m in range(2,len(g_contents2)):
-## g_contents_noheaders+=g_contents2[m] + '\n'
- f.write(g_contents_noheaders)
- g.close()
-
- else: #if probabilistic, must treat table output
- for k,dico in enumerate(liste_dico2):
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_" + hour + ".csv"
- g = open(package_resultsfile,"r")
- if k==0:
- g_contents=g.read()
- g_headers = g_contents.partition('\n')[0] + "\n"
- g_contents0 = g_contents.partition('\n')[2]
- g_headers += g_contents0.partition('\n')[0] + "\n"
- g_contents_noheaders = g_contents0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- it_num = len(g_iterations.split('\n'))
- g_summarytable = g_contents_noheaders.partition('\n\n')[2]
- f.write(g_headers)
- f.write(g_iterations)
- f.write('\n')
- else:
- g_contents = g.read()
- g_contents_noheaders0 = g_contents.partition('\n')[2]
- g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
- for line in g_summarytable2.split('\n')[2:]:
- if line != '':
- g_summarytable += line
- g_iterations_newnumbers = ""
- for line in g_iterations.split("\n"): #increment iteration numbers
- it_num += 1
- cells=line.split(';')
- cells[0]=str(it_num)
- newline=";".join(cells)+'\n'
- g_iterations_newnumbers+=newline
- f.write(g_iterations_newnumbers)
- g.close()
-
- f.write('\n\n' + g_summarytable) #write summary table at end
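-        ## Note: iteration numbers are re-based across packages, e.g. if package 0
-        ## wrote iterations 1..10, the first data line "1;;..." of package 1 is
-        ## rewritten as "11;;..." so the merged CSV keeps one consecutive numbering.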
-
- f.close()
-
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if PSSEParams['UNIT_COMMITMENT']:
- # Write the second csv
- CSVcomplete_filename=pathBigFolder+"\simulationDClog_beforeUC_complete_"+hour+".csv" # Name of the file : global variable
- f=open(CSVcomplete_filename,"a")
-
- if dico['TStest']==1: #if Time series, different output file format
- for k,dico in enumerate(liste_dico2):
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
- g = open(package_resultsfile,"r")
- if k==0:
- f.write(g.read())
- else:
- g_contents = g.read()
- g_contents2 = g_contents.split('\n')
- g_contents_noheaders = '\n'.join(g_contents2[2:])
- f.write(g_contents_noheaders)
- g.close()
-
- else: #if probabilistic, must treat table output
- for k,dico in enumerate(liste_dico2):
- ExtraNL = False
- package_folder = dico['doc_base']
- package_resultsfile = package_folder + "\\simulationDClog_beforeUC_" + hour + ".csv"
- g = open(package_resultsfile,"r")
- if k==0:
- g_contents=g.read()
- g_headers = g_contents.partition('\n')[0] + "\n"
- g_contents0 = g_contents.partition('\n')[2]
- g_headers += g_contents0.partition('\n')[0] + "\n"
- g_contents_noheaders = g_contents0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- g_iterations_split = g_iterations.split('\n')
- if g_iterations_split[-1]=="":
- g_iterations_split = g_iterations_split[0:-1]
- it_num = len(g_iterations_split)
- g_summarytable = g_contents_noheaders.partition('\n\n')[2]
- f.write(g_headers)
- #f.write(g_iterations)
- for line in g_iterations_split:
- f.write(line)
- f.write('\n')
- #f.write('\n')
- else:
- g_contents = g.read()
- g_contents_noheaders0 = g_contents.partition('\n')[2]
- g_contents_noheaders = g_contents_noheaders0.partition('\n')[2]
- g_iterations = g_contents_noheaders.partition('\n\n')[0]
- g_iterations_split = g_iterations.split('\n')
- if g_iterations_split[-1]=="":
- g_iterations_split = g_iterations_split[0:-1]
- g_summarytable2 = g_contents_noheaders.partition('\n\n')[2]
- for line in g_summarytable2.split('\n')[2:]:
- if line != '':
- g_summarytable += line
- g_iterations_newnumbers = ""
- for line in g_iterations_split: #increment iteration numbers
- it_num += 1
- cells=line.split(';')
- cells[0]=str(it_num)
- newline=";".join(cells)+'\n'
- g_iterations_newnumbers+=newline
- f.write(g_iterations_newnumbers)
- g.close()
-
- f.write('\n\n' + g_summarytable) #write summary table at end
-
- f.close()
-
- #convert decimal separator to commas for csv files
- if PSSEParams['DECIMAL_SEPARATOR']==",":
- csvlist = []
- for path, subdirs, files in os.walk(pathBigFolder):
- for name in files:
- if name.endswith(".csv"):
- csvlist.append(os.path.join(path, name))
- for csvfile in csvlist:
- h = open(csvfile,"rb")
- crd = csv.reader(h,delimiter=";")
- csvfiletemp = csvfile[0:-4] + "0" + ".csv"
- g = open(csvfiletemp, "wb")#, newline='\n')
- cwt = csv.writer(g, delimiter=";")
- for row in crd:
- rowwcommas = []
- for item in row:
-                    try:
-                        isnum = float(item)+1 # raises if the cell is not numeric
-                        rowwcommas.append(str(item).replace(".",","))
-                    except:
-                        rowwcommas.append(item) # non-numeric cell: keep as is
- cwt.writerow(rowwcommas)
- h.close()
- g.close()
- os.remove(csvfile)
- shutil.copy2(csvfiletemp, csvfile)
- os.remove(csvfiletemp)
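-        ## Illustrative effect of the conversion above: a numeric cell "1.5" is
-        ## rewritten as "1,5", while non-numeric cells such as headers are kept as is.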
-
-
-    f=open("report.txt",'a')
-    stop_time=time.clock()
-    f.write("Stop time: %f; Duration: %f; Time per execution: %f; " \
-            % (round(stop_time), round(stop_time-start_time), round((stop_time-start_time)/outputSize)))
- f.write("\n\n")
- f.close()
-
- print '\n\nSimulated '+str(outputSize)+' cases in '+ str(round(stop_time-start_time))+\
- ' seconds. Average '+str(round((stop_time-start_time)/outputSize))+'s per case.'
-
- nMissed=int(outputSampleMissed.getSize())
-
-    print '\n\n Non-convergence rate is '+str(round(nMissed*100.0/outputSize,3))\
- +' % ('+str(outputSampleMissed.getSize())+' cases out of '+str(outputSize)+')'
-
- #graphical_out(inputSample, outputSampleAll, inputDim, outputDim, montecarlosize)
+++ /dev/null
-
-############################################################
-# objective of this module: run an OPF computation for each study case
-############################################################
-
-import os,sys,pickle,time
-# from support_functionsPF import *#Valentin
-from support_functionsPF import read_pfd,read_pfd_simple,np, config_contingency
-# import PSENconfig # Valentin
-# sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])#Valentin
-# os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'#Valentin
-import powerfactory
-import PSENconfig
-import shutil
-import pdb
-import csv
-tempdir = r'C:\Logiciels DER\PSEN_PF_V4\Example\Results'
-
-app = powerfactory.GetApplication()
-
-# app.ActivateProject('39 genstatpvmoteur(4)')#Valentin
-prj = app.GetActiveProject()
-case = app.GetActiveStudyCase()#prj.GetContents('Case_0.IntCase',1)[0]
-# case = prj.GetContents('Case_46.IntCase',1)[0]#Valentin
-# case.Activate()#Valentin
-#app.Show()#Valentin
-
-#[busnumber, outserv, idplant, 0, 0, 0, 0, 0, busname, 0, 0,plant, pgini, pgini_a]
-def saveOPFresults(plants):
- #save OPF results: P, Q of generators, Transfo taps, Switched shunt settings, Load-shedding
-    upload = app.GetFromStudyCase('ComDbupd') # select the database update command
-    upload.iopt_lod = 0 # update load scale factor: NO
-    upload.iopt_trf = 1 # update transformer taps: YES
-    upload.iopt_distTrf = 1 # update distribution transformer taps: YES
-    upload.iopt_shnt = 1 # update shunt/filter capacitor steps: YES
-    upload.iopt_lodpq = 0 # update load P,Q: YES or NO (depending on whether load shedding should be taken into account in the initialization)
-    upload.iopt_asmpq = 1 # update asynchronous machine P,Q: YES
-    #upload.iopt_sympqv = 1 # update synchronous + static machine P,Q,V: YES
-    upload.iopt_sympqv = 0 # update synchronous + static machine P,Q,V: NO
-    upload.iopt_upd = 0 # do not update the activated reactive power
-    upload.iopt_tap = 1 # update all transformer taps
-    upload.Execute() # execute the database update
-
-    #save P,Q of dispatchable machines (because we don't want to save non-dispatchable machines with triggers (laws))
- for plant in plants:
- #if str(plant[11]).endswith('.ElmSym'):
- try:
- if plant[11].ictpg == 1:
- plant[11].pgini = plant[3]
- plant[11].qgini = plant[4]
-# else: #non-dispatchable machine
-# triggers = plant[11].GetChildren(1, 'pgini.Charef', 1)
-# if len(triggers) == 0:
-# plant[11].qgini = plant[4]
-        except: # machine lacks the expected attributes; skip it
- pass
-
- return
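-# Usage note: saveOPFresults(plants) is called after a successful OPF run (see the
-# 'UnitCommitment' branch below), so the optimized P,Q and tap settings become the
-# new base point before generators are disconnected.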
-
-#def saveOPFresultsLS():
-#    #save OPF results: P, Q of generators, Transfo taps, Switched shunt settings, Load-shedding
-#    upload = app.GetFromStudyCase('ComDbupd') # select the database update command
-#    upload.iopt_lod = 1 # update load scale factor: YES
-#    upload.iopt_trf = 1 # update transformer taps: YES
-#    upload.iopt_distTrf = 1 # update distribution transformer taps: YES
-#    upload.iopt_shnt = 1 # update shunt/filter capacitor steps: YES
-#    upload.iopt_lodpq = 1 # update load P,Q: YES or NO (depending on whether load shedding should be taken into account in the initialization)
-#    upload.iopt_asmpq = 1 # update asynchronous machine P,Q: YES
-#    upload.iopt_sympqv = 1 # update synchronous + static machine P,Q,V: YES
-#    upload.iopt_upd = 0 # do not update the activated reactive power
-#    upload.iopt_tap = 1 # update all transformer taps
-#    upload.Execute() # execute the database update
-#    return
-
-
-nn=int(''.join(ele for ele in case.loc_name if ele.isdigit()))# case number
-cas = int(nn)
-scenario_temporaire = app.GetActiveScenario()
-if scenario_temporaire:
- scenario_temporaire.Deactivate()
- scenario_temporaire.Delete()
-app.SaveAsScenario('temp0_'+str(nn), 1) # create a scenario to save the base case
-scenario_temporaire0 = app.GetActiveScenario()
-scenario_temporaire0.Save()
-scenario_temporaire0.Deactivate()
-
-start = time.clock();
-with open('data_dico', 'rb') as fichier:
- mon_depickler = pickle.Unpickler(fichier)
- dico = mon_depickler.load()
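-# Assumption (illustrative): the PSEN wrapper is expected to have serialized 'dico'
-# beforehand with the pickle counterpart of the Unpickler used above, e.g.:
-# with open('data_dico', 'wb') as fichier:
-#     pickle.Pickler(fichier).dump(dico)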
-LS_allowed=dico['PFParams']['LOAD_SHEDDING_ALLOWED']
-TStest=dico['TStest']
-position=dico['position']
-PFParams=dico['PFParams']
-sizeY0=dico['sizeY0']
-sizeY1=dico['sizeY1']
-sizeY2=dico['sizeY2']
-sizeY3=dico['sizeY3']
-sizeY4=dico['sizeY4']
-sizeY5=dico['sizeY5']
-sizeY6=dico['sizeY6']
-sizeY7=dico['sizeY7']
-sizeY8=dico['sizeY8']
-sizeY=dico['sizeY']
-gen_UC_list = []
-
-Irate_num = 1
-num_pac = dico['num_pac']
-all_inputs_base = read_pfd_simple(app, prj.loc_name)
-plants_base = all_inputs_base[0]
-loads_base = all_inputs_base[1]
-shunt_base = all_inputs_base[2]
-swshunt_base = all_inputs_base[3]
-
-
-# Total initial (fixed) shunt on buses
-init_shunt = 0
-for i in range(len(shunt_base)):
- init_shunt += float(shunt_base[i][2])
-
-
-
-if dico['UnitCommitment']:
-
-    app.SaveAsScenario('Case_' + str(nn) + '_beforeUC', 1) # create a scenario to save the base case
- scenario_beforeUC = app.GetActiveScenario()
-
- opf = app.GetFromStudyCase('ComOpf')
-
-    erropf = opf.Execute() # run the OPF
-    # Specific handling to solve cases that are difficult to converge
- if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
-        scenario_temporaire0.Apply(0) # restore the initial scenario
-        ldf = app.GetFromStudyCase('ComLdf')
-        ldf.iopt_initOPF = 1 # use the load flow result to initialize the OPF
-        ldf.Execute()
-        opf.iInit = 1
-        erropf = opf.Execute() # run the OPF with the 'cst' objective
-        print(' Run LDF for OPF ')
-        if erropf == 0: print(' OK thanks to the initial LDF ')
- else:
-            scenario_temporaire0.Apply(0) # restore the initial scenario
-            aa = 0
-            while erropf == 1: # if 'cst' does not work
-                scenario_temporaire0.Apply(0) # restore the initial scenario
-                aa += 1
-                opf.iopt_obj = 'los' # objective function = minimization of the total network losses
-                erropf = opf.Execute() # run opf los
-                if erropf == 1:
-                    scenario_temporaire0.Apply(0) # restore the initial scenario
-                    print(' flat-start to OPF loss ! ! ! ')
-                    opf.iInit = 0 # flat-start opf loss
-                    erropf = opf.Execute()
-                    if erropf == 1:
-                        scenario_temporaire0.Apply(0) # restore the initial scenario
-                        break
-                    opf.iInit = 1
-                print(' Run OPF loss ')
-                if erropf == 0: # if 'los' works well
-                    if (aa == 2)and(LS_allowed):
-                        opf.iopt_obj = 'shd'
-                        opf.Execute()
-                    if aa == 3:
-                        # print(' ++++++++++++++++++++++++++++take the OPF LOSS result')
-                        # erropf = 1
-                        # scenario_temporaire0.Apply(0) # restore the initial scenario
-
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shdUC','w')
- #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shdUC','w')
- filew.write('Case_' + str(nn))
- filew.close()
- break
-                    opf.iopt_obj = 'cst'
-                    erropf = opf.Execute() # re-run the OPF with 'cst'
-                    if erropf == 0:
-                        if (aa == 2)and(LS_allowed):
-                            print(' ==================== switching los-shd')
-                        else:
-                            print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
-        if (erropf==1)and(LS_allowed):
-            aa = 0
-            scenario_temporaire0.Apply(0) # restore the initial scenario
-            ldf = app.GetFromStudyCase('ComLdf') # fetch the load flow command (it may not be defined yet in this branch)
-            ldf.Execute() # initial values for opf shd
- # opf.iInit = 1
- while erropf == 1:
-                scenario_temporaire0.Apply(0) # restore the initial scenario
-                aa += 1
-                opf.iopt_obj = 'shd' # objective function = minimization of load shedding
- erropf = opf.Execute()
- if erropf == 1:
-                    scenario_temporaire0.Apply(0) # restore the initial scenario
-                    print(' flat-start to OPF shd ! ! ! 222 ')
- opf.iInit = 0
- erropf = opf.Execute()
- if erropf == 1:
-                        scenario_temporaire0.Apply(0) # restore the initial scenario
- break
- opf.iInit = 1
- print(' Run OPF SHD ')
-                if erropf == 0: # if 'shd' works well
- if aa == 2:
- opf.iopt_obj = 'los'
- opf.Execute()
- if aa == 3:
-                        print(' +++++++++++++++++++++++++take the OPF SHD result')
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn)+'_SHD' + '.shdUC','w')
- #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shdUC','w')
- filew.write('Case_' + str(nn) )
- filew.close()
- break
- opf.iopt_obj = 'cst'
-                    erropf = opf.Execute() # re-run the OPF with 'cst'
- if erropf == 0:
- if aa == 2:
-                            print('=== ========== switching shd-los')
- # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shdUC', 'w')
- # filew.write('Case_' + str(nn))
- # filew.close()
- else:
- print(' OK grace a OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
- # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shdUC','w')
- # filew.write('Case_' + str(nn))
- # filew.close()
-
-
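-    # Editor's sketch (not part of the original script): the recovery logic
-    # above tries objective functions in a fixed order -- 'cst' (cost), then
-    # 'los' (losses), then 'shd' (load shedding) when allowed -- restoring the
-    # saved scenario between attempts and retrying once from a flat start.
-    # A simplified, self-contained version of that ladder:
-    def _opf_fallback_sketch(opf, restore, ls_allowed):
-        """Return the first OPF objective that converges, or None."""
-        objectives = ['cst', 'los'] + (['shd'] if ls_allowed else [])
-        for obj in objectives:
-            restore()                    # back to the saved base scenario
-            opf.iopt_obj = obj
-            for init in (1, 0):          # warm start first, then flat start
-                opf.iInit = init
-                if opf.Execute() == 0:   # 0 is treated as success above
-                    return obj
-        return None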
- loadShed = [[], [], [], [], []]
- fxshnt = [[], [], []]
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
-    ok = (erropf == 0)
-
-    if ok:
-
- all_inputs = read_pfd(app, prj.loc_name, recal=0)
-
- # start = stop; # ++++++++++++++++
- buses = []
- [buses.append(bus[0:8]) for bus in all_inputs[0]]
- lines = []
- [lines.append(bus[0:11]) for bus in all_inputs[1]]
- transf = []
- [transf.append(bus[0:11]) for bus in all_inputs[2]]
- plants = []
- [plants.append(bus[0:12]) for bus in all_inputs[3]]
- loads = []
- [loads.append(bus[0:7]) for bus in all_inputs[4]]
- shunt = []
- [shunt.append(bus[0:7]) for bus in all_inputs[5]]
- motors = []
- [motors.append(bus[0:6]) for bus in all_inputs[6]]
- transf3 = []
- [transf3.append(bus[0:14]) for bus in all_inputs[7]]
- swshunt = []
- [swshunt.append(bus[0:6]) for bus in all_inputs[8]]
-
- # Extraction of the load shedding quantities
- for ii in range(len(loads)):
- LSscale = loads[ii][6].GetAttribute('s:scale')
- P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
- LS = (1-LSscale) * P_setpoint
- if abs(LS)>0.1:
- indexLS.append(ii)
- flagLS = 1 # raise flag loadshedding
- loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
- loadShed[1].append(loads[ii][0]) #busnumber
- loadShed[2].append(loads[ii][4]) #busname
- loadShed[3].append(LS)
- loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
-
-
-# if abs(loads[ii][1] - loads_base[ii][1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
-# indexLS.append(ii)
-# flagLS = 1 # raise flag loadshedding
-# loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
-# # loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
-# loadShed[1].append(loads[ii][0])
-# loadShed[2].append(loads[ii][4])
-# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
-# loadShed[4].append(loads[ii][1])
-
-
- indicLS = sum(loadShed[3]) # sum all Effective MW loads
- loadShed = list(zip(*loadShed)) # transpose the matrix
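-        # Editor's note: LS is the shed active power implied by the load scaling
-        # factor; e.g. (illustrative numbers) s:scale == 0.8 with
-        # s:pini_set == 100 MW gives LS = (1 - 0.8) * 100 = 20 MW, above the
-        # 0.1 MW tolerance, so the load is recorded in loadShed.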
-
-        for ii in range(len(shunt)):
-            if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1:  # check the active power (0.1 to filter out numerical error)
-                indexFS.append(ii)
-                flagFS = 1  # raise the fixed-shunt flag
-                fxshnt[0].append(nn)  # position corresponds to the number of the case being treated
-                # fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])
-                fxshnt[1].append(shunt[ii][0])
-                fxshnt[2].append(shunt[ii][2])
-        indicFS = sum(fxshnt[2])  # sum of the effective switched fixed-shunt power
-        fxshnt = list(zip(*fxshnt))  # transpose the matrix
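-        # Editor's note: fixed shunts carry no scaling attribute, so switching is
-        # detected by comparing the post-OPF value against the base case; only
-        # differences above 0.1 are counted, to filter out numerical noise.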
-
- #save OPF results in study case before disconnecting gens
- saveOPFresults(plants)
-# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
-## for ind in indexLS: # only act on loads that have been shed
-## load = loads_base[ind]
-## #if load[11].iShedding == 1: # if loadshedding allowed on the bus
-# for ind,load in enumerate(loads_base):
-# try: #disactivate triggers, save results
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 1
-# loadQscale[0].outserv = 1
-# load[6].plini = loads[ind][1]
-# load[6].qlini = loads[ind][2]
-# except:
-# pass
- scenario_beforeUC.Save()
-
- #scenario_beforeUC.Deactivate()
-
- #gen_UC_list = []
- for item in plants:
- bus = item[0]
- status = item[1]
- _id = item[2]
- pgen = item[3]
- pmax = item[6]
- try: #will only work for synchronous machines
- pdispatch = item[11].ictpg
- except:
- pdispatch=0
-            if int(pdispatch) == 1 and (abs(pgen) <= pmax * 0.02):  # if the unit generates at less than 2% of Pmax
-                #if (abs(pgen) <= pmax * 0.02):
-                if status == 0:
-                    if not gen_UC_list:  # len(gen_UC_list)==0:
-                        app.SaveAsScenario('Case_' + str(nn), 1)  # create a scenario to save the generator availabilities
-                        scenario_UC = app.GetActiveScenario()
-                    # disconnect the plant
-                    for plant in plants_base:  # find the object representing this generator
-                        if (plant[0] == bus) and (plant[2] == _id) and (plant[11].ip_ctrl != 1):  # and plant[11].ictpg==1: # not the reference bus
-                            plant[11].outserv = 1  # take the unit out of service
-                            outs = plant[11].GetChildren(1, 'outserv.Charef', 1)
-                            if outs:
-                                outs[0].outserv = 1  # also deactivate the outserv trigger to make sure the unit stays out of service
-                    gen_UC_list.append((bus, _id))
-
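-        # Editor's sketch (not part of the original script): the loop above applies
-        # the unit-commitment rule "disconnect any dispatchable synchronous machine
-        # producing at no more than 2% of its Pmax". As a standalone predicate:
-        def _should_disconnect(pdispatch, pgen, pmax, threshold=0.02):
-            """True when a dispatchable unit runs at or below `threshold` of Pmax."""
-            return int(pdispatch) == 1 and abs(pgen) <= pmax * threshold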
-        if gen_UC_list:  # len(gen_UC_list)!=0:
-            scenario_UC.Save()
-            app.SaveAsScenario('tempUC0_' + str(nn), 1)  # create a temporary scenario to save the base case
-            scenario_temporaireUC0 = app.GetActiveScenario()
-            scenario_temporaireUC0.Save()
-            scenario_temporaireUC0.Deactivate()
-# scenario_temporaireUC0 = scenarioUC
-
- #scenario_temporaireUC0=app.GetActiveScenario()
- #scenario_temporaireUC0.Save()
- #scenario_temporaireUC0.Deactivate()
- #scenario_temporaireUC0=scenario_UC
-
-    # 3. Build the output vector Y
- # sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
- z = [0] * 13
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
- if ok:
- # Creates the quantities of interest
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 100:
- z[0] += 1 # Number of lines above 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 100:
- z[1] += 1 # Number of transformers above 100% of their limits
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 100:
- z[1] += 1 # Add number of 3w transformers above 100% of their limits
- for i in range(sizeY1):
- if buses[i][2] > buses[i][5]:
- z[2] += 1
- if buses[i][2] < buses[i][4]:
- z[2] += 1 # Number of buses outside of their voltage limits
- for i in range(sizeY0):
- z[3] += float(plants[i][3]) # Total active production
- for i in range(sizeY3):
- z[4] += float(loads[i][1]) # Total active consumption
- for i in range(sizeY6):
- z[4] += float(motors[i][1]) # add total active consumption from motors
- z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > z[6]:
- z[6] = lines[i][rate_mat_index] # Max flow in lines
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > z[7]:
- z[7] = transf[i][rate_mat_index] # Max flow in transformers
-        for i in range(sizeY7):
-            if transf3[i][rate_mat_index_3w] > z[7]:
-                z[7] = transf3[i][rate_mat_index_3w]  # Max flow in 3w transformers
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 90:
- z[8] += 1
- z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 90:
- z[9] += 1
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 90:
- z[9] += 1
- z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10] = indicFS
- z[11] = indicLS
- z[12] = str(gen_UC_list)
-
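-        # Editor's note -- meaning of the z entries as computed above:
-        #   z[0]  lines > 100% loading          z[1]  2w+3w transformers > 100%
-        #   z[2]  buses outside voltage limits  z[3]  total active production (MW)
-        #   z[4]  total active consumption (MW) z[5]  active losses (% of production)
-        #   z[6]  max line loading (%)          z[7]  max transformer loading (%)
-        #   z[8]  lines in the 90-100% band     z[9]  transformers in the 90-100% band
-        #   z[10] fixed-shunt indicator         z[11] load-shedding indicator
-        #   z[12] disconnected units (gen_UC_list, as a string; only in this pre-UC block)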
- # Creates the output vectors
- for Pmach in range(sizeY0):
- y[Pmach] = float(plants[Pmach][3])
- Ymac[Pmach] = float(plants[Pmach][3])
- for Qmach in range(sizeY0):
- y[Qmach + sizeY0] = float(plants[Qmach][4])
- for Vbus in range(sizeY1):
- y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
- for Iline in range(sizeY2):
- y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
- for Pline in range(sizeY2):
- y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
- for Qline in range(sizeY2):
- y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
- for Itrans in range(sizeY5):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
- for Ptrans in range(sizeY5):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
- for Qtrans in range(sizeY5):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
- for Itrans in range(sizeY7):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(
- transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range(sizeY7):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
- for Qtrans in range(sizeY7):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
- for Pload in range(sizeY3):
- y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
- for Pmotor in range(sizeY6):
- y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
- motors[Pmotor][1])
- for Qmotor in range(sizeY6):
- y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
- motors[Qmotor][2])
- for Qshunt in range(sizeY4):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
- shunt[Qshunt][4])
- for Qshunt in range(sizeY8):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
- swshunt[Qshunt][4])
-
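-        # Editor's sketch (illustrative, not part of the original script): y is a
-        # flat concatenation of fixed-size blocks; the loop offsets above follow
-        # from cumulative block sizes, e.g.:
-        _blocks = [('Pmach', sizeY0), ('Qmach', sizeY0), ('Vbus', sizeY1),
-                   ('Iline', sizeY2), ('Pline', sizeY2), ('Qline', sizeY2),
-                   ('Itrans', sizeY5), ('Ptrans', sizeY5), ('Qtrans', sizeY5),
-                   ('Itrans3', sizeY7), ('Ptrans3', sizeY7), ('Qtrans3', sizeY7),
-                   ('Pload', sizeY3), ('Pmotor', sizeY6), ('Qmotor', sizeY6),
-                   ('Qshunt', sizeY4), ('Qswshunt', sizeY8)]
-        _offsets = dict(zip([name for name, _ in _blocks],
-                            np.cumsum([0] + [s for _, s in _blocks[:-1]])))
-        # e.g. _offsets['Pload'] == 2*sizeY0 + sizeY1 + 3*sizeY2 + 3*sizeY5 + 3*sizeY7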
- # nz = len(z)
- #scenario_temporaireUC.Deactivate()
- #scenario_temporaireUC.Delete()
-
-    res_beforeUC = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
-                    list(fxshnt)]  # save the result in a pickle file
- with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.before', 'wb') as fichier:
- mon_pickler = pickle.Pickler(fichier, protocol=2)
- mon_pickler.dump(res_beforeUC)
-
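-    # Editor's sketch (not part of the original script): the '.before' file can
-    # be read back with a matching Unpickler, e.g.:
-    # with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.before', 'rb') as f:
-    #     y_b, z_b, Ymac_b, indicLS_b, indicFS_b, loadShed_b, fxshnt_b = pickle.Unpickler(f).load()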
-
- if len(gen_UC_list) == 0:
- del z[-1]
- #change scenario name
- scenario_beforeUCpost=app.GetActiveScenario()
-        app.SaveAsScenario('Case_' + str(nn), 1)  # create a scenario to save the base case
- #scenario_beforeUCpost.Save()
- scenario_beforeUC.Delete()
-
-        # copy the no-cost-OPF convergence marker files for the post-UC stage as well, since no additional treatment will be done
- for filename in os.listdir(os.path.dirname(os.path.realpath(__file__))):
- #for filename in os.listdir(tempdir):
- if filename.endswith('.shdUC'):
- #filew = open(os.path.dirname(os.path.realpath(__file__)) + filename + 'UC','w')
- shutil.copy2(os.path.join(os.path.dirname(os.path.realpath(__file__)), filename), os.path.join(os.path.dirname(os.path.realpath(__file__)),filename[0:-2]))
- #shutil.copy2(os.path.join(tempdir, filename), os.path.join(tempdir,filename[0:-2] ))
- #filew.close()
-
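-    # Editor's note: filename[0:-2] strips the trailing 'UC', duplicating each
-    # marker file under the post-UC naming scheme, e.g.
-    # 'Case_7_LOSS.shdUC' -> 'Case_7_LOSS.shd' (illustrative case number).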
- #----------------------------------RE-run after unit commitment step--------------------------------------------------
- if len(gen_UC_list)!=0:
-
- scenario_UC.Activate()
-
- opf = app.GetFromStudyCase('ComOpf')
-
- opf.iInit = 0
-        erropf = opf.Execute()
-        # Specific handling to help cases that are difficult to converge
-        if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
-            scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-            ldf = app.GetFromStudyCase('ComLdf')
-            ldf.iopt_initOPF = 1  # use the load flow to initialize the OPF
-            ldf.Execute()
-            opf.iInit = 1
-            erropf = opf.Execute()  # rerun the OPF with the 'cst' objective
-            print(' Run LDF for OPF ')
-            if erropf == 0: print(' OK thanks to initial LDF ')
-            else:
-                scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                aa = 0
-                while erropf == 1:  # if 'cst' does not converge
-                    scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                    aa += 1
-                    opf.iopt_obj = 'los'  # objective function = minimization of total network losses
-                    erropf = opf.Execute()  # run OPF 'los'
-                    if erropf == 1:
-                        scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                        print(' flat-start to OPF loss ! ! ! ')
-                        opf.iInit = 0  # flat start for OPF 'los'
-                        erropf = opf.Execute()
-                        if erropf == 1:
-                            scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                            break
-                        opf.iInit = 1
-                    print(' Run OPF loss OK ')
-                    if erropf == 0:  # if the 'los' OPF converges
-                        if (aa == 2) and (LS_allowed):
-                            opf.iopt_obj = 'shd'
-                            opf.Execute()
-                        if aa == 3:
-                            # keep the OPF LOSS result and flag the case with a marker file
-                            filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
-                            #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
-                            filew.write('Case_' + str(nn))
-                            filew.close()
-                            break
-                        opf.iopt_obj = 'cst'
-                        erropf = opf.Execute()  # rerun OPF 'cst'
-                        if erropf == 0:
-                            if (aa == 2) and (LS_allowed):
-                                print(' ==================== switched los-shd')
-                            else:
-                                print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
-                if (erropf == 1) and (LS_allowed):
-                    aa = 0
-                    scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                    ldf.Execute()  # initial values for OPF 'shd'
-                    # opf.iInit = 1
-                    while erropf == 1:
-                        scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                        aa += 1
-                        opf.iopt_obj = 'shd'  # objective function = minimization of load shedding
-                        erropf = opf.Execute()
-                        if erropf == 1:
-                            scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                            print(' flat-start to OPF shd ! ! ! 222 ')
-                            opf.iInit = 0
-                            erropf = opf.Execute()
-                            if erropf == 1:
-                                scenario_temporaireUC0.Apply(0)  # restore the initial scenario
-                                break
-                            opf.iInit = 1
-                        print(' Run OPF SHD ')
-                        if erropf == 0:  # if the 'shd' OPF converges
-                            if aa == 2:
-                                opf.iopt_obj = 'los'
-                                opf.Execute()
-                            if aa == 3:
-                                print(' +++++++++++++++++++++++++keep the OPF SHD result')
-                                filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
-                                #filew = open(tempdir + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
-                                filew.write('Case_' + str(nn))
-                                filew.close()
-                                break
-                            opf.iopt_obj = 'cst'
-                            erropf = opf.Execute()  # rerun OPF 'cst'
-                            if erropf == 0:
-                                if aa == 2:
-                                    print('=== ========== switched shd-los')
-                                    # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shd', 'w')
-                                    # filew.write('Case_' + str(nn))
-                                    # filew.close()
-                                else:
-                                    print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
-                                    # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
-                                    # filew.write('Case_' + str(nn))
-                                    # filew.close()
-
-        # End of the specific handling for cases that are difficult to converge
-
- loadShed = [[], [], [], [], []]
- fxshnt = [[], [], []]
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
-        ok = (erropf == 0)
-
-        if ok:
-
- all_inputs = read_pfd(app, prj.loc_name, recal=0)
- stop = time.clock();
- start = stop; # ++++++++++++++++
- buses = []
- [buses.append(bus[0:8]) for bus in all_inputs[0]]
- lines = []
- [lines.append(bus[0:11]) for bus in all_inputs[1]]
- transf = []
- [transf.append(bus[0:11]) for bus in all_inputs[2]]
- plants = []
- [plants.append(bus[0:11]) for bus in all_inputs[3]]
- loads = []
- [loads.append(bus[0:7]) for bus in all_inputs[4]]
- shunt = []
- [shunt.append(bus[0:7]) for bus in all_inputs[5]]
- motors = []
- [motors.append(bus[0:6]) for bus in all_inputs[6]]
- transf3 = []
- [transf3.append(bus[0:14]) for bus in all_inputs[7]]
- swshunt = []
- [swshunt.append(bus[0:6]) for bus in all_inputs[8]]
-
- # Extraction of the load shedding quantities
-
-
- for ii in range(len(loads)):
- LSscale = loads[ii][6].GetAttribute('s:scale')
- P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
- LS = (1-LSscale) * P_setpoint
- if abs(LS)>0.1:
- indexLS.append(ii)
- flagLS = 1 # raise flag loadshedding
- loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
- loadShed[1].append(loads[ii][0]) #busnumber
- loadShed[2].append(loads[ii][4]) #busname
- loadShed[3].append(LS)
- loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
-
-
-# if abs(loads[ii][1] - loads_base[ii][1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
-# indexLS.append(ii)
-# flagLS = 1 # raise flag loadshedding
-#
-# loadShed[0].append( nn) # Position seems to correspond to the number of the case we are treating
-# # loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
-# loadShed[1].append(loads[ii][0])
-# loadShed[2].append(loads[ii][4])
-# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
-# loadShed[4].append(loads[ii][1])
-
-
- indicLS = sum(loadShed[3]) # sum all Effective MW loads
- loadShed = list(zip(*loadShed)) # transpose the matrix
-
-            for ii in range(len(shunt)):
-                if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1:  # check the active power (0.1 to filter out numerical error)
-                    indexFS.append(ii)
-                    flagFS = 1  # raise the fixed-shunt flag
-                    fxshnt[0].append(nn)  # position corresponds to the number of the case being treated
-                    # fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])  # why [0]? two lists or a dict might be clearer
-                    fxshnt[1].append(shunt[ii][0])
-                    fxshnt[2].append(shunt[ii][2])
-            indicFS = sum(fxshnt[2])  # sum of the effective switched fixed-shunt power
-            fxshnt = list(zip(*fxshnt))  # transpose the matrix
-
-        # 3. Build the output vector Y
- # sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
- z = np.zeros(12) # np.zeros returns a new array of the given shape and type filled with zeros
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
- if ok:
- # Creates the quantities of interest
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 100:
- z[0] += 1 # Number of lines above 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 100:
- z[1] += 1 # Number of transformers above 100% of their limits
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 100:
- z[1] += 1 # Add number of 3w transformers above 100% of their limits
- for i in range(sizeY1):
- if buses[i][2] > buses[i][5]:
- z[2] += 1
- if buses[i][2] < buses[i][4]:
- z[2] += 1 # Number of buses outside of their voltage limits
- for i in range(sizeY0):
- z[3] += float(plants[i][3]) # Total active production
- for i in range(sizeY3):
- z[4] += float(loads[i][1]) # Total active consumption
- for i in range(sizeY6):
- z[4] += float(motors[i][1]) # add total active consumption from motors
- z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > z[6]:
- z[6] = lines[i][rate_mat_index] # Max flow in lines
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > z[7]:
- z[7] = transf[i][rate_mat_index] # Max flow in transformers
-            for i in range(sizeY7):
-                if transf3[i][rate_mat_index_3w] > z[7]:
-                    z[7] = transf3[i][rate_mat_index_3w]  # Max flow in 3w transformers
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 90:
- z[8] += 1
- z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 90:
- z[9] += 1
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 90:
- z[9] += 1
- z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10] = indicFS
- z[11] = indicLS
-
- # Creates the output vectors
- for Pmach in range(sizeY0):
- y[Pmach] = float(plants[Pmach][3])
- Ymac[Pmach] = float(plants[Pmach][3])
- for Qmach in range(sizeY0):
- y[Qmach + sizeY0] = float(plants[Qmach][4])
- for Vbus in range(sizeY1):
- y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
- for Iline in range(sizeY2):
- y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
- for Pline in range(sizeY2):
- y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
- for Qline in range(sizeY2):
- y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
- for Itrans in range(sizeY5):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
- for Ptrans in range(sizeY5):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
- for Qtrans in range(sizeY5):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
- for Itrans in range(sizeY7):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range(sizeY7):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
- for Qtrans in range(sizeY7):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
- for Pload in range(sizeY3):
- y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
- for Pmotor in range(sizeY6):
- y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
- motors[Pmotor][1])
- for Qmotor in range(sizeY6):
- y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
- motors[Qmotor][2])
- for Qshunt in range(sizeY4):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
- shunt[Qshunt][4])
- for Qshunt in range(sizeY8):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
- swshunt[Qshunt][4])
-
- #save OPF results in after UC scenario
- saveOPFresults(plants)
-# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
-# # for ind in indexLS: # only act on loads that have been shed
-# # load = loads_base[ind]
-# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
-# for ind,load in enumerate(loads_base):
-# try: #disactivate triggers, save results
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 1
-# loadQscale[0].outserv = 1
-# load[6].plini = loads[ind][1]
-# load[6].qlini = loads[ind][2]
-# except:
-# pass
-# pass
- scenario_UC.Save()
- scenario_temporaireUC0.Delete()
-
- #scenario_temporaire.Deactivate()
- #scenario_temporaire.Delete()
-
-
-
-
-if (not dico['UnitCommitment']):  # or (dico['UnitCommitment'] and len(gen_UC_list) != 0): # i.e. (no unit commitment) or (UC with at least one deactivated unit)
-
-
- #scenario_temporaire0.Activate() #scenario de base
-
-    app.SaveAsScenario('Case_' + str(nn), 1)  # create a scenario to save the base case
- scenario = app.GetActiveScenario()
- scenario.Activate()
-
-
-
- opf = app.GetFromStudyCase('ComOpf')
-
- opf.iInit = 0
-
-
-    erropf = opf.Execute()
-    # Specific handling to help cases that are difficult to converge
-    if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
-        scenario_temporaire0.Apply(0)  # restore the initial scenario
-        ldf = app.GetFromStudyCase('ComLdf')
-        ldf.iopt_initOPF = 1  # use the load flow to initialize the OPF
-        ldf.Execute()
-        opf.iInit = 1
-        erropf = opf.Execute()  # rerun the OPF with the 'cst' objective
-        print(' Run LDF for OPF ')
-        if erropf == 0: print(' OK thanks to initial LDF ')
-        else:
-            scenario_temporaire0.Apply(0)  # restore the initial scenario
-            aa = 0
-            while erropf == 1:  # if 'cst' does not converge
-                scenario_temporaire0.Apply(0)  # restore the initial scenario
-                aa += 1
-                opf.iopt_obj = 'los'  # objective function = minimization of total network losses
-                erropf = opf.Execute()  # run OPF 'los'
-                if erropf == 1:
-                    scenario_temporaire0.Apply(0)  # restore the initial scenario
-                    print(' flat-start to OPF loss ! ! ! ')
-                    opf.iInit = 0  # flat start for OPF 'los'
-                    erropf = opf.Execute()
-                    if erropf == 1:
-                        scenario_temporaire0.Apply(0)  # restore the initial scenario
-                        break
-                    opf.iInit = 1
-                print(' Run OPF loss OK ')
-                if erropf == 0:  # if the 'los' OPF converges
-                    if (aa == 2) and (LS_allowed):
-                        opf.iopt_obj = 'shd'
-                        opf.Execute()
-                    if aa == 3:
-                        # keep the OPF LOSS result and flag the case with a marker file
-                        filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
-                        #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
-                        filew.write('Case_' + str(nn))
-                        filew.close()
-                        break
-                    opf.iopt_obj = 'cst'
-                    erropf = opf.Execute()  # rerun OPF 'cst'
-                    if erropf == 0:
-                        if (aa == 2) and (LS_allowed):
-                            print(' ==================== switched los-shd')
-                        else:
-                            print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
-            if (erropf == 1) and (LS_allowed):
-                aa = 0
-                scenario_temporaire0.Apply(0)  # restore the initial scenario
-                ldf.Execute()  # initial values for OPF 'shd'
-                # opf.iInit = 1
-                while erropf == 1:
-                    scenario_temporaire0.Apply(0)  # restore the initial scenario
-                    aa += 1
-                    opf.iopt_obj = 'shd'  # objective function = minimization of load shedding
-                    erropf = opf.Execute()
-                    if erropf == 1:
-                        scenario_temporaire0.Apply(0)  # restore the initial scenario
-                        print(' flat-start to OPF shd ! ! ! 222 ')
-                        opf.iInit = 0
-                        erropf = opf.Execute()
-                        if erropf == 1:
-                            scenario_temporaire0.Apply(0)  # restore the initial scenario
-                            break
-                        opf.iInit = 1
-                    print(' Run OPF SHD ')
-                    if erropf == 0:  # if the 'shd' OPF converges
-                        if aa == 2:
-                            opf.iopt_obj = 'los'
-                            opf.Execute()
-                        if aa == 3:
-                            print(' +++++++++++++++++++++++++keep the OPF SHD result')
-                            filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
-                            #filew = open(tempdir + '/Case_' + str(nn) + '_SHD' + '.shd', 'w')
-                            filew.write('Case_' + str(nn))
-                            filew.close()
-                            break
-                        opf.iopt_obj = 'cst'
-                        erropf = opf.Execute()  # rerun OPF 'cst'
-                        if erropf == 0:
-                            if aa == 2:
-                                print('=== ========== switched shd-los')
-                                # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shd', 'w')
-                                # filew.write('Case_' + str(nn))
-                                # filew.close()
-                            else:
-                                print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
-                                # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
-                                # filew.write('Case_' + str(nn))
-                                # filew.close()
-
-    # End of the specific handling for cases that are difficult to converge
-
- loadShed = [[], [], [], [], []]
- fxshnt = [[], [], []]
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
-    ok = (erropf == 0)
-
-    if ok:
-
- all_inputs = read_pfd(app, prj.loc_name, recal=0)
- stop = time.clock();
- start = stop; # ++++++++++++++++
- buses = []
- [buses.append(bus[0:8]) for bus in all_inputs[0]]
- lines = []
- [lines.append(bus[0:11]) for bus in all_inputs[1]]
- transf = []
- [transf.append(bus[0:11]) for bus in all_inputs[2]]
- plants = []
- [plants.append(bus[0:11]) for bus in all_inputs[3]]
- loads = []
- [loads.append(bus[0:7]) for bus in all_inputs[4]]
- shunt = []
- [shunt.append(bus[0:7]) for bus in all_inputs[5]]
- motors = []
- [motors.append(bus[0:6]) for bus in all_inputs[6]]
- transf3 = []
- [transf3.append(bus[0:14]) for bus in all_inputs[7]]
- swshunt = []
- [swshunt.append(bus[0:6]) for bus in all_inputs[8]]
-
- # Extraction of the load shedding quantities
- for ii in range(len(loads)):
-
- LSscale = loads[ii][6].GetAttribute('s:scale')
- P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
- LS = (1-LSscale) * P_setpoint
- if abs(LS)>0.1:
- indexLS.append(ii)
- flagLS = 1 # raise flag loadshedding
- loadShed[0].append(nn) # Position seems to correspond to the number of the case we are treating
- loadShed[1].append(loads[ii][0]) #busnumber
- loadShed[2].append(loads[ii][4]) #busname
- loadShed[3].append(LS)
- loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
-
-
-# if abs(loads[ii][1] - loads_base[ii][1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
-# indexLS.append(ii)
-# flagLS = 1 # raise flag loadshedding
-#
-# loadShed[0].append( nn) # Position seems to correspond to the number of the case we are treating
-# # loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
-# loadShed[1].append(loads[ii][0])
-# loadShed[2].append(loads[ii][4])
-# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
-# loadShed[4].append(loads[ii][1])
-
- indicLS = sum(loadShed[3]) # sum all Effective MW loads
- loadShed = list(zip(*loadShed)) # transpose the matrix
-
-        for ii in range(len(shunt)):
-            if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1:  # check the active power (0.1 to filter out numerical error)
-                indexFS.append(ii)
-                flagFS = 1  # raise the fixed-shunt flag
-                fxshnt[0].append(nn)  # position corresponds to the number of the case being treated
-                # fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])  # why [0]? two lists or a dict might be clearer
-                fxshnt[1].append(shunt[ii][0])
-                fxshnt[2].append(shunt[ii][2])
-        indicFS = sum(fxshnt[2])  # sum of the effective switched fixed-shunt power
-        fxshnt = list(zip(*fxshnt))  # transpose the matrix
-
-    # 3. Build the output vector Y
- # sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
- z = np.zeros(12) # np.zeros returns a new array of the given shape and type filled with zeros
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
- if ok:
- # Creates the quantities of interest
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 100:
- z[0] += 1 # Number of lines above 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 100:
- z[1] += 1 # Number of transformers above 100% of their limits
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 100:
- z[1] += 1 # Add number of 3w transformers above 100% of their limits
- for i in range(sizeY1):
- if buses[i][2] > buses[i][5]:
- z[2] += 1
- if buses[i][2] < buses[i][4]:
- z[2] += 1 # Number of buses outside of their voltage limits
- for i in range(sizeY0):
- z[3] += float(plants[i][3]) # Total active production
- for i in range(sizeY3):
- z[4] += float(loads[i][1]) # Total active consumption
- for i in range(sizeY6):
- z[4] += float(motors[i][1]) # add total active consumption from motors
- z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > z[6]:
- z[6] = lines[i][rate_mat_index] # Max flow in lines
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > z[7]:
- z[7] = transf[i][rate_mat_index] # Max flow in transformers
-        for i in range(sizeY7):
-            if transf3[i][rate_mat_index_3w] > z[7]:
-                z[7] = transf3[i][rate_mat_index_3w]  # Max flow in 3w transformers
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 90:
- z[8] += 1
- z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 90:
- z[9] += 1
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 90:
- z[9] += 1
- z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10] = indicFS
- z[11] = indicLS
-
- # Creates the output vectors
- for Pmach in range(sizeY0):
- y[Pmach] = float(plants[Pmach][3])
- Ymac[Pmach] = float(plants[Pmach][3])
- for Qmach in range(sizeY0):
- y[Qmach + sizeY0] = float(plants[Qmach][4])
- for Vbus in range(sizeY1):
- y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
- for Iline in range(sizeY2):
- y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
- for Pline in range(sizeY2):
- y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
- for Qline in range(sizeY2):
- y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
- for Itrans in range(sizeY5):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
- for Ptrans in range(sizeY5):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
- for Qtrans in range(sizeY5):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
- for Itrans in range(sizeY7):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range(sizeY7):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
- for Qtrans in range(sizeY7):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
- for Pload in range(sizeY3):
- y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
- for Pmotor in range(sizeY6):
- y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
- motors[Pmotor][1])
- for Qmotor in range(sizeY6):
- y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
- motors[Qmotor][2])
- for Qshunt in range(sizeY4):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
- shunt[Qshunt][4])
- for Qshunt in range(sizeY8):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
- swshunt[Qshunt][4])
-
- saveOPFresults(plants)
-# if opf.iopt_obj=='shd': #and indicLS > 0.1*len(loads_base):
-## for ind in indexLS: # only act on loads that have been shed
-## load = loads_base[ind]
-## #if load[11].iShedding == 1: # if loadshedding allowed on the bus
-# for ind,load in enumerate(loads_base):
-# try: #disactivate triggers, save results
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 1
-# loadQscale[0].outserv = 1
-# load[6].plini = loads[ind][1]
-# load[6].qlini = loads[ind][2]
-# except:
-# pass
-
- scenario.Save()
- #scenario.Deactivate()
-
-
-scenario_temporaire0.Delete()
-
-
-res_final = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
-             list(fxshnt)]  # save the result in a pickle file
-with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.final', 'wb') as fichier:
- mon_pickler = pickle.Pickler(fichier, protocol=2)
- mon_pickler.dump(res_final)
-
-
-
-
-
-stop = time.clock(); print(' run study cases in ' + str(round(stop - start, 3)) + ' seconds'); start = stop
-# aa=1
+++ /dev/null
-############################################################
-# purpose of this module: run the OPF only for the study cases that the parallel computation Comtast.Execute() could not simulate
-############################################################
-
-import time
-import PSENconfig # file with Eficas output dictionaries
-import os,sys,pickle
-import pdb
-# from support_functionsPF import *#Valentin
-from support_functionsPF import read_pfd,read_pfd_simple,np, config_contingency
-from math import *
-import shutil
-from comfile import saveOPFresults
-
-Debug = True
-if Debug:
- sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])#Valentin
- os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'#Valentin
-
-stop = time.clock(); start = stop;
-with open(os.path.dirname(os.path.realpath(__file__))+'/data_dico', 'rb') as fichier:
- mon_depickler = pickle.Unpickler(fichier)
- dico = mon_depickler.load()
-position = dico['position']
-LS_allowed=dico['PFParams']['LOAD_SHEDDING_ALLOWED']
-filer=open(os.path.dirname(os.path.realpath(__file__))+'/absence'+str(position)+'.txt','r')
-_cas=[]
-for line in filer:
- line=line.replace('\n', '')
- _cas.append(line)
-filer.close()
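-# Editor's note: 'absence<position>.txt' is expected to contain one study-case
-# number per line (the cases the parallel run failed to simulate); e.g. a file
-# holding "3" and "17" on separate lines makes this module re-run Case_3 and
-# Case_17 (illustrative numbers).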
-
-##############################################################################/
-import powerfactory
-app = powerfactory.GetApplication()
-user = app.GetCurrentUser()
-prjs = user.GetContents('*.IntPrj')
-prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
-prj = prjs[0]
-prj.Activate()
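-# Editor's note: sorting by gnrl_modif (last-modified timestamp) in descending
-# order and taking prjs[0] activates the most recently modified project,
-# assumed here to be the one prepared by the parallel run.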
-#app.Show()
-
-all_inputs_base = read_pfd_simple(app, prj.loc_name)
-plants_base = all_inputs_base[0]
-loads_base = all_inputs_base[1]
-shunt_base = all_inputs_base[2]
-swshunt_base = all_inputs_base[3]
-
-
-for cas in _cas:
-    print('run study case ' + cas)
- case = prj.GetContents('Case_'+cas+'.IntCase', 1)[0]
- case.Activate()
- scenario_temporaire = app.GetActiveScenario()
- if scenario_temporaire:
- scenario_temporaire.Delete()
-    fScen = app.GetProjectFolder('scen')  # folder that contains the triggers
- scen = fScen.GetChildren(1, 'Base.IntScenario', 1)[0]
- scen.Activate()
-
-    app.SaveAsScenario('temp0_' + cas, 1)  # create a temporary scenario to save the base case
- scenario_temporaire0 = app.GetActiveScenario()
- scenario_temporaire0.Save()
- scenario_temporaire0.Deactivate()
-
- ##########################################################
-    nn = int(cas)  # case number
- settriger_iter = case.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
- # settriger_iter.ftrigger = nn
- start = time.clock();
- # with open(os.path.dirname(os.path.realpath(__file__)) + '/data_dico', 'rb') as fichier:
- # mon_depickler = pickle.Unpickler(fichier)
- # dico = mon_depickler.load()
-
- TStest = dico['TStest']
- # position = dico['position']
- PFParams = dico['PFParams']
- sizeY0 = dico['sizeY0']
- sizeY1 = dico['sizeY1']
- sizeY2 = dico['sizeY2']
- sizeY3 = dico['sizeY3']
- sizeY4 = dico['sizeY4']
- sizeY5 = dico['sizeY5']
- sizeY6 = dico['sizeY6']
- sizeY7 = dico['sizeY7']
- sizeY8 = dico['sizeY8']
- sizeY = dico['sizeY']
- gen_UC_list = []
- # if dico['PFParams']['I_MAX'] == 'RateA':
- Irate_num = 1
- # elif dico['PFParams']['I_MAX'] == 'RateB':
- # Irate_num = 2
- # elif dico['PFParams']['I_MAX'] == 'RateC':
- # Irate_num = 3
- num_pac = dico['num_pac']
- all_inputs_base = read_pfd_simple(app, prj.loc_name)
- # buses_base = all_inputs_base[0]
- # lines_base = all_inputs_base[1]
- # transf_base = all_inputs_base[2]
- plants_base = all_inputs_base[0]
- loads_base = all_inputs_base[1]
- shunt_base = all_inputs_base[2]
- # motors_base = all_inputs_base[6]
- # transf3_base = all_inputs_base[7]
- swshunt_base = all_inputs_base[3]
-
-# #reactivate load triggers
-# for load in loads_base:
-# try: #re-activate triggers if exist and disactivated
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 0
-# loadQscale[0].outserv = 0
-# except:
-# pass
-#
-# #rerun in case triggers were disactivated
-# all_inputs_base = read_pfd_simple(app, prj.loc_name)
-# # buses_base = all_inputs_base[0]
-# # lines_base = all_inputs_base[1]
-# # transf_base = all_inputs_base[2]
-# plants_base = all_inputs_base[0]
-# loads_base = all_inputs_base[1]
-# shunt_base = all_inputs_base[2]
-# # motors_base = all_inputs_base[6]
-# # transf3_base = all_inputs_base[7]
-# swshunt_base = all_inputs_base[3]
-
- # Total initial (fixed) shunt on buses
- init_shunt = 0
- for i in range(len(shunt_base)):
- init_shunt += float(shunt_base[i][2])
-
- if dico['UnitCommitment']:
-        app.SaveAsScenario('Case_' + cas + '_beforeUC', 1)  # create a scenario to save the base case
- scenario_beforeUC = app.GetActiveScenario()
-
- opf = app.GetFromStudyCase('ComOpf')
-        erropf = opf.Execute()  # run the OPF
-        # Specific handling to help cases that are difficult to converge
-        if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
-            scenario_temporaire0.Apply(0)  # restore the initial scenario
-            ldf = app.GetFromStudyCase('ComLdf')
-            ldf.iopt_initOPF = 1  # use the load flow to initialize the OPF
-            ldf.Execute()
-            opf.iInit = 1
-            erropf = opf.Execute()  # rerun the OPF with the 'cst' objective
-            print(' Run LDF for OPF ')
-            if erropf == 0: print(' OK thanks to initial LDF ')
-            else:
-                scenario_temporaire0.Apply(0)  # restore the initial scenario
-                aa = 0
-                while erropf == 1:  # if 'cst' does not converge
-                    scenario_temporaire0.Apply(0)  # restore the initial scenario
-                    aa += 1
-                    opf.iopt_obj = 'los'  # objective function = minimization of total network losses
-                    erropf = opf.Execute()  # run OPF 'los'
-                    if erropf == 1:
-                        scenario_temporaire0.Apply(0)  # restore the initial scenario
-                        print(' flat-start to OPF loss ! ! ! ')
-                        opf.iInit = 0  # flat start for OPF 'los'
-                        erropf = opf.Execute()
-                        if erropf == 1:
-                            scenario_temporaire0.Apply(0)  # restore the initial scenario
-                            break
-                        opf.iInit = 1
-                    print(' Run OPF loss ')
-                    if erropf == 0:  # if the 'los' OPF converges
-                        if (aa == 2) and (LS_allowed):
-                            opf.iopt_obj = 'shd'
-                            opf.Execute()
-                        if aa == 3:
-                            # keep the OPF LOSS result and flag the case with a marker file
-                            filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shdUC', 'w')
-                            #filew = open(tempdir + '/Case_' + str(nn) + '_LOSS' + '.shdUC', 'w')
-                            filew.write('Case_' + str(nn))
-                            filew.close()
-                            break
-                        opf.iopt_obj = 'cst'
-                        erropf = opf.Execute()  # rerun OPF 'cst'
-                        if erropf == 0:
-                            if (aa == 2) and (LS_allowed):
-                                print(' ==================== switched los-shd')
-                            else:
-                                print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
-                if (erropf == 1) and (LS_allowed):
-                    aa = 0
-                    scenario_temporaire0.Apply(0)  # restore the initial scenario
-                    ldf.Execute()  # initial values for OPF 'shd'
-                    # opf.iInit = 1
-                    while erropf == 1:
-                        scenario_temporaire0.Apply(0)  # restore the initial scenario
-                        aa += 1
-                        opf.iopt_obj = 'shd'  # objective function = minimization of load shedding
-                        erropf = opf.Execute()
-                        if erropf == 1:
-                            scenario_temporaire0.Apply(0)  # restore the initial scenario
-                            print(' flat-start to OPF shd ! ! ! 222 ')
-                            opf.iInit = 0
-                            erropf = opf.Execute()
-                            if erropf == 1:
-                                scenario_temporaire0.Apply(0)  # restore the initial scenario
-                                break
-                            opf.iInit = 1
-                        print(' Run OPF SHD ')
-                        if erropf == 0:  # if the 'shd' OPF converges
-                            if aa == 2:
-                                opf.iopt_obj = 'los'
-                                opf.Execute()
-                            if aa == 3:
-                                print(' +++++++++++++++++++++++++keep the OPF SHD result')
-                                filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shdUC', 'w')
-                                #filew = open(tempdir + '/Case_' + str(nn) + '_SHD' + '.shdUC', 'w')
-                                filew.write('Case_' + str(nn))
-                                filew.close()
-                                break
-                            opf.iopt_obj = 'cst'
-                            erropf = opf.Execute()  # rerun OPF 'cst'
-                            if erropf == 0:
-                                if aa == 2:
-                                    print('=== ========== switched shd-los')
-                                    # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdlosscost' + '.shdUC', 'w')
-                                    # filew.write('Case_' + str(nn))
-                                    # filew.close()
-                                else:
-                                    print(' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(aa))
-                                    # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shdUC', 'w')
-                                    # filew.write('Case_' + str(nn))
-                                    # filew.close()
-
-
- loadShed = [[], [], [], [], []]
- fxshnt = [[], [], []]
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
-        ok = (erropf == 0)
-
-        if ok:
-
- all_inputs = read_pfd(app, prj.loc_name, recal=0)
-
- # start = stop; # ++++++++++++++++
- buses = []
- [buses.append(bus[0:8]) for bus in all_inputs[0]]
- lines = []
- [lines.append(bus[0:11]) for bus in all_inputs[1]]
- transf = []
- [transf.append(bus[0:11]) for bus in all_inputs[2]]
- plants = []
- [plants.append(bus[0:12]) for bus in all_inputs[3]]
- loads = []
- [loads.append(bus[0:7]) for bus in all_inputs[4]]
- shunt = []
- [shunt.append(bus[0:7]) for bus in all_inputs[5]]
- motors = []
- [motors.append(bus[0:6]) for bus in all_inputs[6]]
- transf3 = []
- [transf3.append(bus[0:14]) for bus in all_inputs[7]]
- swshunt = []
- [swshunt.append(bus[0:6]) for bus in all_inputs[8]]
-
- # Extraction of the load shedding quantities
- for ii in range(len(loads)):
-
- LSscale = loads[ii][6].GetAttribute('s:scale')
- P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
- LS = (1-LSscale) * P_setpoint
- if abs(LS)>0.1:
- indexLS.append(ii)
- flagLS = 1 # raise flag loadshedding
- loadShed[0].append(position) # Position seems to correspond to the number of the case we are treating
- loadShed[1].append(loads[ii][0]) #busnumber
- loadShed[2].append(loads[ii][4]) #busname
- loadShed[3].append(LS)
- loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
-
-# if (loads[ii][1] - loads_base[ii][
-# 1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
-# indexLS.append(ii)
-# flagLS = 1 # raise flag loadshedding
-#
-# loadShed[0].append(
-# position) # Position seems to correspond to the number of the case we are treating
-# loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
-# loadShed[1].append(loads[ii][0])
-# loadShed[2].append(loads[ii][4])
-# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
-# loadShed[4].append(loads[ii][1])
-
-
- indicLS = sum(loadShed[3]) # sum all Effective MW loads
- loadShed = list(zip(*loadShed)) # transpose the matrix
-
-            for ii in range(len(shunt)):
-                if abs(shunt[ii][1] - shunt_base[ii][1]) > 0.1:  # check the active power (0.1 to filter out numerical error)
-                    indexFS.append(ii)
-                    flagFS = 1  # raise the fixed-shunt flag
-                    fxshnt[0].append(position)  # position corresponds to the number of the case being treated
-                    fxshnt[0].extend(['' for i in range(len(indexFS) - 1)])
-                    fxshnt[1].append(shunt[ii][0])
-                    fxshnt[2].append(shunt[ii][2])
-            indicFS = sum(fxshnt[2])  # sum of the effective switched fixed-shunt power
-            fxshnt = list(zip(*fxshnt))  # transpose the matrix
-
- #save OPF results in study case before disconnecting gens
- saveOPFresults(plants)
-# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
-# # for ind in indexLS: # only act on loads that have been shed
-# # load = loads_base[ind]
-# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
-# for ind,load in enumerate(loads_base):
-# try: #disactivate triggers, save results
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 1
-# loadQscale[0].outserv = 1
-# load[6].plini = loads[ind][1]
-# load[6].qlini = loads[ind][2]
-# except:
-# pass
- scenario_beforeUC.Save()
-
- #scenario_beforeUC.Deactivate()
-
-
- #gen_UC_list = []
- for item in plants:
- bus = item[0]
- status = item[1]
- _id = item[2]
- pgen = item[3]
- pmax = item[6]
- try: #will only work for synchronous machines
- pdispatch = item[11].ictpg
- except:
- pdispatch=0
-                if int(pdispatch) == 1 and (abs(pgen) <= pmax * 0.02):  # if the unit generates at less than 2% of Pmax
-                    #if (abs(pgen) <= pmax * 0.02):
-                    if status == 0:
-                        if not gen_UC_list:  # len(gen_UC_list)==0:
-                            app.SaveAsScenario('Case_' + str(nn), 1)  # create a scenario to save the generator availabilities
-                            scenario_UC = app.GetActiveScenario()
-                        # disconnect the plant
-                        for plant in plants_base:  # find the object representing this generator
-                            if (plant[0] == bus) and (plant[2] == _id) and (plant[11].ip_ctrl != 1):  # and plant[11].ictpg==1: # not the reference bus
-                                plant[11].outserv = 1  # take the unit out of service
-                                outs = plant[11].GetChildren(1, 'outserv.Charef', 1)
-                                if outs:
-                                    outs[0].outserv = 1  # also deactivate the outserv trigger to make sure the unit stays out of service
-                        gen_UC_list.append((bus, _id))
-
-            if gen_UC_list:  # len(gen_UC_list)!=0:
-                scenario_UC.Save()
-                app.SaveAsScenario('tempUC0_' + cas, 1)  # create a temporary scenario to save the base case
-                scenario_temporaireUC0 = app.GetActiveScenario()
-                scenario_temporaireUC0.Save()
-                scenario_temporaireUC0.Deactivate()
-# scenario_temporaireUC0 = scenarioUC
-
-        # 3. Build the output vector Y
- # sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
- z = [0] * 13
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
- if ok:
- # Creates the quantities of interest
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 100:
- z[0] += 1 # Number of lines above 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 100:
- z[1] += 1 # Number of transformers above 100% of their limits
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 100:
- z[1] += 1 # Add number of 3w transformers above 100% of their limits
- for i in range(sizeY1):
- if buses[i][2] > buses[i][5]:
- z[2] += 1
- if buses[i][2] < buses[i][4]:
- z[2] += 1 # Number of buses outside of their voltage limits
- for i in range(sizeY0):
- z[3] += float(plants[i][3]) # Total active production
- for i in range(sizeY3):
- z[4] += float(loads[i][1]) # Total active consumption
- for i in range(sizeY6):
- z[4] += float(motors[i][1]) # add total active consumption from motors
- z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > z[6]:
- z[6] = lines[i][rate_mat_index] # Max flow in lines
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > z[7]:
- z[7] = transf[i][rate_mat_index] # Max flow in transformers
-            for i in range(sizeY7):
-                if transf3[i][rate_mat_index_3w] > z[7]:
-                    z[7] = transf3[i][rate_mat_index_3w]  # Max flow in 3w transformers
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 90:
- z[8] += 1
- z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 90:
- z[9] += 1
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 90:
- z[9] += 1
- z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10] = indicFS
- z[11] = indicLS
- z[12] = str(gen_UC_list)
-
- # Creates the output vectors
- for Pmach in range(sizeY0):
- y[Pmach] = float(plants[Pmach][3])
- Ymac[Pmach] = float(plants[Pmach][3])
- for Qmach in range(sizeY0):
- y[Qmach + sizeY0] = float(plants[Qmach][4])
- for Vbus in range(sizeY1):
- y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
- for Iline in range(sizeY2):
- y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
- for Pline in range(sizeY2):
- y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
- for Qline in range(sizeY2):
- y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
- for Itrans in range(sizeY5):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
- for Ptrans in range(sizeY5):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
- for Qtrans in range(sizeY5):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
- for Itrans in range(sizeY7):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(
- transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range(sizeY7):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
- for Qtrans in range(sizeY7):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
- for Pload in range(sizeY3):
- y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
- for Pmotor in range(sizeY6):
- y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
- motors[Pmotor][1])
- for Qmotor in range(sizeY6):
- y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
- motors[Qmotor][2])
- for Qshunt in range(sizeY4):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
- shunt[Qshunt][4])
- for Qshunt in range(sizeY8):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
- swshunt[Qshunt][4])
-
- # nz = len(z)
- #scenario_temporaireUC.Deactivate()
- #scenario_temporaireUC.Delete()
-
-        res_beforeUC = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
-                        list(fxshnt)]  # save the result in a pickle file
-
- with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.before', 'wb') as fichier:
- mon_pickler = pickle.Pickler(fichier, protocol=2)
- mon_pickler.dump(res_beforeUC)
-
- if len(gen_UC_list) == 0:
- del z[-1]
- #change scenario name
- scenario_beforeUCpost=app.GetActiveScenario()
-            app.SaveAsScenario('Case_' + str(nn), 1)  # create a scenario to save the base case
- #scenario_beforeUCpost.Save()
- scenario_beforeUC.Delete()
-
-
-            # copy the no-cost-OPF convergence marker files for the post-UC stage as well, since no additional treatment was done
- for filename in os.listdir(os.path.dirname(os.path.realpath(__file__))):
- #for filename in os.listdir(tempdir):
- if filename.endswith('.shdUC'):
- #filew = open(os.path.dirname(os.path.realpath(__file__)) + filename + 'UC','w')
- shutil.copy2(os.path.join(os.path.dirname(os.path.realpath(__file__)), filename), os.path.join(os.path.dirname(os.path.realpath(__file__)),filename[0:-2]))
- #shutil.copy2(os.path.join(tempdir, filename), os.path.join(tempdir,filename[0:-2]))
- #filew.close()
-
- #----------------------------------RE-run after unit commitment step--------------------------------------------------
- if len(gen_UC_list)!=0:
-
- #scenario_temporaire0.Activate()
-
- #scenario_temporaire0.Apply(0)
- #scenario_UC.Apply(0)
- scenario_UC.Activate()
-
- #app.SaveAsScenario('temp' + cas, 1) # creer scenario pour sauvegarder le cas de base
- #scenario_temporaire = app.GetActiveScenario()
- opf = app.GetFromStudyCase('ComOpf')
-
- opf.iInit = 0
- erropf = opf.Execute()
- # Traitement specifique pour resoudre des cas difficle a converger
- if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- ldf = app.GetFromStudyCase('ComLdf')
- ldf.iopt_initOPF = 1 # utiliser pour OPF
- ldf.Execute()
- opf.iInit = 1
- erropf = opf.Execute() # lancer opf avec 'cst'
- print(' Run LDF for OPF ')
- if erropf == 0: print(' OK grace a LDF initial ')
- else:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- aa = 0
- while erropf == 1: # si cst ne marche pas
- scenario_temporaireUC0.Apply(0)#recuperer scenario initiale
- aa += 1
- opf.iopt_obj = 'los' # Fonction objectif = minimisation de la perte totale du reseau
- erropf = opf.Execute() # run opf los
- if erropf == 1:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- print(' flat-stat to OPF loss ! ! ! ')
- opf.iInit = 0 # flatstart opf loss
- erropf = opf.Execute()
- if erropf == 1:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- break
- opf.iInit = 1
- print(' Run OPF loss OK ')
- if erropf == 0: # si los marche bien
- if (aa == 2)and(LS_allowed):
- opf.iopt_obj = 'shd'
- opf.Execute()
- if aa == 3:
- # print(' ++++++++++++++++++++++++++++prendre le resultat du OPF LOSS')
- # erropf = 1
- # scenario_temporaire0.Apply(0) # recuperer scenario initiale
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
- #filew = open(tempdir + '/Case_' + str(nn)+'_LOSS' + '.shd','w')
- filew.write('Case_' + str(nn))
- filew.close()
- break
- opf.iopt_obj = 'cst'
- erropf = opf.Execute() # relancer opt cst
- if erropf == 0:
- if (aa == 2)and(LS_allowed):
- print(' ==================== basculer los-shd')
- else:
- print(' OK grace a OPF LOSS =======================LOSS in case aa=' + str(aa))
- if (erropf==1)and(LS_allowed):
- aa = 0
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- ldf.Execute() # initiale valeur pour opf shd
- # opf.iInit = 1
- while erropf == 1:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- aa += 1
- opf.iopt_obj = 'shd' # Fonction objectif = minimisation de la perte totale du reseau
- erropf = opf.Execute()
- if erropf == 1:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- print(' flat-stat to OPF shd ! ! ! 222 ')
- opf.iInit = 0
- erropf = opf.Execute()
- if erropf == 1:
- scenario_temporaireUC0.Apply(0) # recuperer scenario initiale
- break
- opf.iInit = 1
- print(' Run OPF SHD ')
- if erropf == 0: # si shd marche bien
- if aa == 2:
- opf.iopt_obj = 'los'
- opf.Execute()
- if aa == 3:
- print(' +++++++++++++++++++++++++prendre le resultat du OPF SHD')
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd','w')
- #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shd','w')
- filew.write('Case_' + str(nn))
- filew.close()
- break
- opf.iopt_obj = 'cst'
-                                    erropf = opf.Execute() # re-run OPF 'cst'
- if erropf == 0:
- if aa == 2:
-                                            print('=== ========== switching shd-los')
- # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(
- # nn) + '_shdlosscost' + '.shd', 'w')
- # filew.write('Case_' + str(nn))
- # filew.close()
- else:
-                                            print(
-                                                ' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(
-                                                    aa))
- # filew = open( os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
- # filew.write('Case_' + str(nn))
- # filew.close()
-                        # End of the specific treatment for cases that are difficult to converge
-
- loadShed = [[], [], [], [], []]
- fxshnt = [[], [], []]
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
-                        ok = (erropf == 0)
-
-                        if ok:
-
- all_inputs = read_pfd(app, prj.loc_name, recal=0)
- stop = time.clock();
- start = stop; # ++++++++++++++++
-                            buses = [bus[0:8] for bus in all_inputs[0]]
-                            lines = [bus[0:11] for bus in all_inputs[1]]
-                            transf = [bus[0:11] for bus in all_inputs[2]]
-                            plants = [bus[0:11] for bus in all_inputs[3]]
-                            loads = [bus[0:7] for bus in all_inputs[4]]
-                            shunt = [bus[0:7] for bus in all_inputs[5]]
-                            motors = [bus[0:6] for bus in all_inputs[6]]
-                            transf3 = [bus[0:14] for bus in all_inputs[7]]
-                            swshunt = [bus[0:6] for bus in all_inputs[8]]
-
- # Extraction of the load shedding quantities
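-                            # The OPF sheds load by lowering the load scale factor 's:scale'
-                            # below 1, so the shed active power is (1 - scale) * P_setpoint;
-                            # deviations above 0.1 MW in absolute value are recorded as shed.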
- for ii in range(len(loads)):
-
- LSscale = loads[ii][6].GetAttribute('s:scale')
- P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
- LS = (1-LSscale) * P_setpoint
- if abs(LS)>0.1:
- indexLS.append(ii)
- flagLS = 1 # raise flag loadshedding
- loadShed[0].append(position) # Position seems to correspond to the number of the case we are treating
- loadShed[1].append(loads[ii][0]) #busnumber
- loadShed[2].append(loads[ii][4]) #busname
- loadShed[3].append(LS)
- loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
-
-# if (loads[ii][1] - loads_base[ii][
-# 1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
-# indexLS.append(ii)
-# flagLS = 1 # raise flag loadshedding
-#
-# loadShed[0].append(
-# position) # Position seems to correspond to the number of the case we are treating
-# #loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
-# loadShed[1].append(loads[ii][0])
-# loadShed[2].append(loads[ii][4])
-# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
-# loadShed[4].append(loads[ii][1])
-
-
-                            indicLS = sum(loadShed[3]) # total shed active power (MW)
- loadShed = list(zip(*loadShed)) # transpose the matrix
-
-                            for ii in range(len(shunt)):
-                                if (shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the difference (0.1 threshold to filter out numerical noise)
-                                    indexFS.append(ii)
-                                    flagFS = 1 # raise the fixed-shunt flag
-                                    fxshnt[0].append(position) # Position seems to correspond to the number of the case we are treating
-                                    fxshnt[0].extend(['' for i in range(
-                                        len(indexFS) - 1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
-                                    fxshnt[1].append(shunt[ii][0])
-                                    fxshnt[2].append(shunt[ii][2])
-                            indicFS = sum(fxshnt[2]) # total added fixed-shunt power
-                            fxshnt = list(zip(*fxshnt)) # transpose the matrix
-
-                            # 3. Display Y
- # sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
-                            z = np.zeros(12) # twelve summary indicators, filled below
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
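-                            # Layout of the output vector y (concatenated in this order):
-                            # plant P | plant Q | bus V | line loading %, P, Q |
-                            # 2w transfo loading %, P, Q | 3w transfo loading %, P, Q |
-                            # load P | motor P | motor Q | fixed shunt Q | switched shunt Q.
-                            # z holds 12 summary indicators: lines > 100%, transfos > 100%,
-                            # voltage violations, total P generation, total P consumption,
-                            # losses %, max line loading, max transfo loading, lines and
-                            # transfos between 90% and 100%, indicFS and indicLS.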
- if ok:
- # Creates the quantities of interest
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 100:
- z[0] += 1 # Number of lines above 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 100:
- z[1] += 1 # Number of transformers above 100% of their limits
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 100:
- z[1] += 1 # Add number of 3w transformers above 100% of their limits
- for i in range(sizeY1):
- if buses[i][2] > buses[i][5]:
- z[2] += 1
- if buses[i][2] < buses[i][4]:
- z[2] += 1 # Number of buses outside of their voltage limits
- for i in range(sizeY0):
- z[3] += float(plants[i][3]) # Total active production
- for i in range(sizeY3):
- z[4] += float(loads[i][1]) # Total active consumption
- for i in range(sizeY6):
- z[4] += float(motors[i][1]) # add total active consumption from motors
- z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > z[6]:
- z[6] = lines[i][rate_mat_index] # Max flow in lines
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > z[7]:
- z[7] = transf[i][rate_mat_index] # Max flow in transformers
-                                for i in range(sizeY7):
-                                    if transf3[i][rate_mat_index_3w] > z[7]:
-                                        z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 90:
- z[8] += 1
- z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 90:
- z[9] += 1
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 90:
- z[9] += 1
- z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10] = indicFS
- z[11] = indicLS
-
- # Creates the output vectors
- for Pmach in range(sizeY0):
- y[Pmach] = float(plants[Pmach][3])
- Ymac[Pmach] = float(plants[Pmach][3])
- for Qmach in range(sizeY0):
- y[Qmach + sizeY0] = float(plants[Qmach][4])
- for Vbus in range(sizeY1):
- y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
- for Iline in range(sizeY2):
- y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
- for Pline in range(sizeY2):
- y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
- for Qline in range(sizeY2):
- y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
- for Itrans in range(sizeY5):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
- for Ptrans in range(sizeY5):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
- for Qtrans in range(sizeY5):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
- for Itrans in range(sizeY7):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range(sizeY7):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
- for Qtrans in range(sizeY7):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
- for Pload in range(sizeY3):
- y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
- for Pmotor in range(sizeY6):
- y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
- motors[Pmotor][1])
- for Qmotor in range(sizeY6):
- y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
- motors[Qmotor][2])
- for Qshunt in range(sizeY4):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
- shunt[Qshunt][4])
- for Qshunt in range(sizeY8):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
- swshunt[Qshunt][4])
-
- saveOPFresults(plants)
-# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
-# # for ind in indexLS: # only act on loads that have been shed
-# # load = loads_base[ind]
-# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
-# for ind,load in enumerate(loads_base):
-# try: #disactivate triggers, save results
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 1
-# loadQscale[0].outserv = 1
-# load[6].plini = loads[ind][1]
-# load[6].qlini = loads[ind][2]
-# except:
-# pass
-#
- scenario_UC.Save()
- scenario_temporaireUC0.Delete()
-
-                    if (not dico['UnitCommitment']): # or (dico['UnitCommitment'] and len(gen_UC_list) != 0): # if (no unit commitment) or (with UC and at least one generator deactivated)
-
- #scenario_temporaire0.Activate()
-
- #if len(gen_UC_list)!=0:# deja desactive au moin 1 generateur
- # scenario_temporaire0.Activate()
- #scenario_UC.Apply(0)
-
-                        app.SaveAsScenario('Case_' + cas, 1) # create a scenario to save the base case
- scenario = app.GetActiveScenario()
- scenario.Activate()
-
-
- opf = app.GetFromStudyCase('ComOpf')
- opf.iInit = 0
-
-
- erropf = opf.Execute()
-                        # Specific treatment to solve cases that are difficult to converge
- if (erropf == 1) and (PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST') and PFParams['NON_COST_OPTIMAL_SOLUTION_ALLOWED']:
-                            scenario_temporaire0.Apply(0) # restore the initial scenario
-                            ldf = app.GetFromStudyCase('ComLdf')
-                            ldf.iopt_initOPF = 1 # use the load flow to initialise the OPF
-                            ldf.Execute()
-                            opf.iInit = 1
-                            erropf = opf.Execute() # run the OPF with the 'cst' objective
-                            print(' Run LDF for OPF ')
-                            if erropf == 0: print(' OK thanks to the initial LDF ')
- else:
-                                scenario_temporaire0.Apply(0) # restore the initial scenario
-                                aa = 0
-                                while erropf == 1: # if 'cst' does not work
-                                    scenario_temporaire0.Apply(0) # restore the initial scenario
-                                    aa += 1
-                                    opf.iopt_obj = 'los' # objective function = minimisation of total network losses
-                                    erropf = opf.Execute() # run OPF 'los'
-                                    if erropf == 1:
-                                        scenario_temporaire0.Apply(0) # restore the initial scenario
-                                        print(' flat-start to OPF loss ! ! ! ')
-                                        opf.iInit = 0 # flat start for OPF 'los'
-                                        erropf = opf.Execute()
-                                        if erropf == 1:
-                                            scenario_temporaire0.Apply(0) # restore the initial scenario
-                                            break
-                                        opf.iInit = 1
-                                        print(' Run OPF loss OK ')
-                                    if erropf == 0: # if 'los' works
- if (aa == 2)and(LS_allowed):
- opf.iopt_obj = 'shd'
- opf.Execute()
- if aa == 3:
-                                            # print(' ++++++++++++++++++++++++++++ take the result of the OPF LOSS')
-                                            # erropf = 1
-                                            # scenario_temporaire0.Apply(0) # restore the initial scenario
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_LOSS' + '.shd', 'w')
- #filew = open(tempdir + '/Case_' + str(nn)+'_LOSS' + '.shd','w')
- filew.write('Case_' + str(nn))
- filew.close()
- break
-                                    opf.iopt_obj = 'cst'
-                                    erropf = opf.Execute() # re-run OPF 'cst'
-                                    if erropf == 0:
-                                        if (aa == 2)and(LS_allowed):
-                                            print(' ==================== switching los-shd')
-                                        else:
-                                            print(' OK thanks to OPF LOSS =======================LOSS in case aa=' + str(aa))
- if (erropf==1)and(LS_allowed):
- aa = 0
-                                scenario_temporaire0.Apply(0) # restore the initial scenario
-                                ldf.Execute() # initial values for OPF 'shd'
-                                # opf.iInit = 1
-                                while erropf == 1:
-                                    scenario_temporaire0.Apply(0) # restore the initial scenario
-                                    aa += 1
-                                    opf.iopt_obj = 'shd' # objective function = minimisation of load shedding cost
-                                    erropf = opf.Execute()
-                                    if erropf == 1:
-                                        scenario_temporaire0.Apply(0) # restore the initial scenario
-                                        print(' flat-start to OPF shd ! ! ! 222 ')
- opf.iInit = 0
- erropf = opf.Execute()
- if erropf == 1:
-                                            scenario_temporaire0.Apply(0) # restore the initial scenario
- break
- opf.iInit = 1
- print(' Run OPF SHD ')
-                                    if erropf == 0: # if 'shd' works
- if aa == 2:
- opf.iopt_obj = 'los'
- opf.Execute()
- if aa == 3:
-                                            print(' +++++++++++++++++++++++++ take the result of the OPF SHD')
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_SHD' + '.shd','w')
- #filew = open(tempdir + '/Case_' + str(nn)+'_SHD' + '.shd','w')
- filew.write('Case_' + str(nn))
- filew.close()
- break
- opf.iopt_obj = 'cst'
-                                    erropf = opf.Execute() # re-run OPF 'cst'
- if erropf == 0:
- if aa == 2:
-                                            print('=== ========== switching shd-los')
- # filew = open(os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(
- # nn) + '_shdlosscost' + '.shd', 'w')
- # filew.write('Case_' + str(nn))
- # filew.close()
- else:
-                                            print(
-                                                ' OK thanks to OPF SHD -------------------------------Load SHEDDING in case aa=' + str(
-                                                    aa))
- # filew = open( os.path.dirname(os.path.realpath(__file__)) + '/Case_' + str(nn) + '_shdcost' + '.shd', 'w')
- # filew.write('Case_' + str(nn))
- # filew.close()
-                        # End of the specific treatment for cases that are difficult to converge
-
- loadShed = [[], [], [], [], []]
- fxshnt = [[], [], []]
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
-                    ok = (erropf == 0)
-
-                    if ok:
-
- all_inputs = read_pfd(app, prj.loc_name, recal=0)
- stop = time.clock();
- start = stop; # ++++++++++++++++
-                        buses = [bus[0:8] for bus in all_inputs[0]]
-                        lines = [bus[0:11] for bus in all_inputs[1]]
-                        transf = [bus[0:11] for bus in all_inputs[2]]
-                        plants = [bus[0:11] for bus in all_inputs[3]]
-                        loads = [bus[0:7] for bus in all_inputs[4]]
-                        shunt = [bus[0:7] for bus in all_inputs[5]]
-                        motors = [bus[0:6] for bus in all_inputs[6]]
-                        transf3 = [bus[0:14] for bus in all_inputs[7]]
-                        swshunt = [bus[0:6] for bus in all_inputs[8]]
-
- # Extraction of the load shedding quantities
- for ii in range(len(loads)):
-
- LSscale = loads[ii][6].GetAttribute('s:scale')
- P_setpoint = loads[ii][6].GetAttribute('s:pini_set')
- LS = (1-LSscale) * P_setpoint
- if abs(LS)>0.1:
- indexLS.append(ii)
- flagLS = 1 # raise flag loadshedding
- loadShed[0].append(position) # Position seems to correspond to the number of the case we are treating
- loadShed[1].append(loads[ii][0]) #busnumber
- loadShed[2].append(loads[ii][4]) #busname
- loadShed[3].append(LS)
- loadShed[4].append(loads[ii][1]) #remaining load (voltage rectified)
-
-# if (loads[ii][1] - loads_base[ii][1]) > 0.1: # verifiier la puissance active (0.1 pour eliminer l'erreurs de calcul)
-# indexLS.append(ii)
-# flagLS = 1 # raise flag loadshedding
-#
-# loadShed[0].append(
-# position) # Position seems to correspond to the number of the case we are treating
-# loadShed[0].extend(['' for i in range(len(indexLS) - 1)])
-# loadShed[1].append(loads[ii][0])
-# loadShed[2].append(loads[ii][4])
-# loadShed[3].append(loads_base[ii][1] - loads[ii][1])
-# loadShed[4].append(loads[ii][1])
-
-
-                        indicLS = sum(loadShed[3]) # total shed active power (MW)
- loadShed = list(zip(*loadShed)) # transpose the matrix
-
-
-
-                        for ii in range(len(shunt)):
-                            if (shunt[ii][1] - shunt_base[ii][1]) > 0.1: # check the difference (0.1 threshold to filter out numerical noise)
-                                indexFS.append(ii)
-                                flagFS = 1 # raise the fixed-shunt flag
-                                fxshnt[0].append(position) # Position seems to correspond to the number of the case we are treating
-                                fxshnt[0].extend(['' for i in range(
-                                    len(indexFS) - 1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
-                                fxshnt[1].append(shunt[ii][0])
-                                fxshnt[2].append(shunt[ii][2])
-                        indicFS = sum(fxshnt[2]) # total added fixed-shunt power
-                        fxshnt = list(zip(*fxshnt)) # transpose the matrix
-
-                        # 3. Display Y
- # sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2 * sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
-                        z = np.zeros(12) # twelve summary indicators, filled below
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
- if ok:
- # Creates the quantities of interest
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 100:
- z[0] += 1 # Number of lines above 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 100:
- z[1] += 1 # Number of transformers above 100% of their limits
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 100:
- z[1] += 1 # Add number of 3w transformers above 100% of their limits
- for i in range(sizeY1):
- if buses[i][2] > buses[i][5]:
- z[2] += 1
- if buses[i][2] < buses[i][4]:
- z[2] += 1 # Number of buses outside of their voltage limits
- for i in range(sizeY0):
- z[3] += float(plants[i][3]) # Total active production
- for i in range(sizeY3):
- z[4] += float(loads[i][1]) # Total active consumption
- for i in range(sizeY6):
- z[4] += float(motors[i][1]) # add total active consumption from motors
- z[5] = (z[3] - z[4]) / z[3] * 100 # Active power losses
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > z[6]:
- z[6] = lines[i][rate_mat_index] # Max flow in lines
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > z[7]:
- z[7] = transf[i][rate_mat_index] # Max flow in transformers
-                            for i in range(sizeY7):
-                                if transf3[i][rate_mat_index_3w] > z[7]:
-                                    z[7] = transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
- for i in range(sizeY2):
- if lines[i][rate_mat_index] > 90:
- z[8] += 1
- z[8] = z[8] - z[0] # Number of lines between 90% and 100% of their limits
- for i in range(sizeY5):
- if transf[i][rate_mat_index] > 90:
- z[9] += 1
- for i in range(sizeY7):
- if transf3[i][rate_mat_index_3w] > 90:
- z[9] += 1
- z[9] = z[9] - z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10] = indicFS
- z[11] = indicLS
-
- # Creates the output vectors
- for Pmach in range(sizeY0):
- y[Pmach] = float(plants[Pmach][3])
- Ymac[Pmach] = float(plants[Pmach][3])
- for Qmach in range(sizeY0):
- y[Qmach + sizeY0] = float(plants[Qmach][4])
- for Vbus in range(sizeY1):
- y[Vbus + 2 * sizeY0] = float(buses[Vbus][2])
- for Iline in range(sizeY2):
- y[Iline + 2 * sizeY0 + sizeY1] = float(lines[Iline][rate_mat_index])
- for Pline in range(sizeY2):
- y[Pline + 2 * sizeY0 + sizeY1 + sizeY2] = float(lines[Pline][6])
- for Qline in range(sizeY2):
- y[Qline + 2 * sizeY0 + sizeY1 + 2 * sizeY2] = float(lines[Qline][7])
- for Itrans in range(sizeY5):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2] = float(transf[Itrans][rate_mat_index])
- for Ptrans in range(sizeY5):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY5] = float(transf[Ptrans][6])
- for Qtrans in range(sizeY5):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 2 * sizeY5] = float(transf[Qtrans][7])
- for Itrans in range(sizeY7):
- y[Itrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5] = float(transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range(sizeY7):
- y[Ptrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + sizeY7] = float(transf3[Ptrans][8])
- for Qtrans in range(sizeY7):
- y[Qtrans + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 2 * sizeY7] = float(transf3[Qtrans][9])
- for Pload in range(sizeY3):
- y[Pload + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7] = float(loads[Pload][1])
- for Pmotor in range(sizeY6):
- y[Pmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3] = float(
- motors[Pmotor][1])
- for Qmotor in range(sizeY6):
- y[Qmotor + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + sizeY6] = float(
- motors[Qmotor][2])
- for Qshunt in range(sizeY4):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6] = float(
- shunt[Qshunt][4])
- for Qshunt in range(sizeY8):
- y[Qshunt + 2 * sizeY0 + sizeY1 + 3 * sizeY2 + 3 * sizeY5 + 3 * sizeY7 + sizeY3 + 2 * sizeY6 + sizeY4] = float(
- swshunt[Qshunt][4])
-
- saveOPFresults(plants)
-# if opf.iopt_obj=='shd':# and indicLS > 0.1*len(loads_base):
-# # for ind in indexLS: # only act on loads that have been shed
-# # load = loads_base[ind]
-# # #if load[11].iShedding == 1: # if loadshedding allowed on the bus
-# for ind,load in enumerate(loads_base):
-# try: #disactivate triggers, save results
-# loadPscale = load[6].GetChildren(1, 'plini.Charef', 1)
-# loadQscale = load[6].GetChildren(1, 'qlini.Charef', 1)
-# loadPscale[0].outserv = 1
-# loadQscale[0].outserv = 1
-# load[6].plini = loads[ind][1]
-# load[6].qlini = loads[ind][2]
-# except:
-# pass
-
-
- scenario.Save()
-
-
-
- # if len(gen_UC_list) == 0:
- scenario_temporaire0.Delete()
- res_final = [list(y), list(z), list(Ymac), indicLS, indicFS, list(loadShed),
-                                 list(fxshnt)] # save the result in a pickle file
- with open(dico['doc_base'] + '/' + app.GetActiveStudyCase().loc_name + '.final', 'wb') as fichier:
- mon_pickler = pickle.Pickler(fichier, protocol=2)
- mon_pickler.dump(res_final)
-
-stop = time.clock(); print(' run ' + str(len(_cas)) + ' study cases in correct_comtask.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
-# app.Show()
-# aa=1
+++ /dev/null
-#-------------------------------------------------------------------------------
-# Name: module1
-# Purpose:
-#
-# Author: j15773
-#
-# Created: 09/06/2016
-# Copyright: (c) j15773 2016
-# Licence: <your licence>
-#-------------------------------------------------------------------------------
-
-import os
-import sys
-import numpy as np
-from support_functions import *
-
-NetworkFile=r"C:\Users\j15773\Documents\GTDosier\PSEN\Versions\PSEN_V13 - ec dispatch\Test Case ECD\JPS Network 2019 - half load.sav"
-PSSE_PATH=r"C:\Program Files (x86)\PTI\PSSE33\PSSBIN"
-ecd_file = r"C:\Users\j15773\Documents\GTDosier\PSEN\Versions\PSEN_V13 - ec dispatch\Test Case ECD\Jam19_ECD.ecd"
-
-sys.path.append(PSSE_PATH)
-os.environ['PATH'] += ';' + PSSE_PATH + ';'
-
-import psspy
-import redirect
-
-###initialization PSSE
-psspy.psseinit(10000)
-_i=psspy.getdefaultint()
-_f=psspy.getdefaultreal()
-_s=psspy.getdefaultchar()
-redirect.psse2py()
-
-# Silent execution of PSSe
-islct=6 # 6=no output; 1=standard
-psspy.progress_output(islct)
-
-def EconomicDispatch(NetworkFile, ecd_file, LossesRatio, TapChange):
-
- #Network File
- psspy.case(NetworkFile)
- psspy.save(NetworkFile)
-
- #read contents
- all_inputs_base=read_sav(NetworkFile)
- buses_base=all_inputs_base[0]
- plants_base=all_inputs_base[3]
- loads_base=all_inputs_base[4]
- motors_base=all_inputs_base[6]
-
- #TotalLoad
- P_load = 0
- for load in loads_base:
- P_load += load[1]
- for motor in motors_base:
- P_load+= motor[1]
-
- #total gen not in ecd file
- f = open(ecd_file,'r')
- ecd_lines = f.readlines()
- ecd_genlist = []
- for line in ecd_lines:
- line = line.split('\t')
- busnum = int(line[0])
- genid = line[1].strip()
- ecd_genlist.append((busnum,genid))
- f.close()
-
- P_nondisp = 0
- P_disp = 0
- for gen in plants_base:
- busnum = gen[0]
- genid = gen[2].strip()
- pgen = gen[3]
- if (busnum,genid) in ecd_genlist:
- P_disp+=pgen
- else:
- P_nondisp+=pgen
-    print(P_disp)
-    print(P_nondisp)
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- ierr1 = psspy.ecdi(3,1,1,ecd_file,1,[0.0,0.0])
- ierr2 = psspy.ecdi(3,1,2,ecd_file,1,[0.0,0.0])
- ierr3 = psspy.ecdi(3,1,3,ecd_file,0,[P_load*(1+LossesRatio) - P_nondisp,0.0])
- ierr4 = psspy.ecdi(3,1,4,ecd_file,1,[0.0,0.0])
-
- EcdErrorCodes = [ierr1,ierr2,ierr3,ierr4]
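-    # The dispatch target handed to the third ecdi stage is the total load
-    # inflated by the assumed loss ratio, minus the generation of units absent
-    # from the .ecd file, so the dispatchable units cover what remains:
-    #   P_target = P_load * (1 + LossesRatio) - P_nondisp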
-
-    # Newton-Raphson power flow calculation. Params:
-    # tap adjustment flag (0 = disable / 1 = enable stepping / 2 = enable direct)
-    # area interchange adjustment (0 = disable)
-    # phase shift adjustment (0 = disable)
-    # dc tap adjustment (1 = enable)
-    # switched shunt adjustment (1 = enable)
-    # flat start (0 = default / disabled, 1 = enabled); disabled because we are not starting from scratch
-    # var limit (default = 99, -1 = ignore limit, 0 = apply var limit immediately)
-    # non-divergent solution (0 = disable)
-    psspy.fnsl([TapChange, _i, _i, _i, _i, _i, _i,_i]) # Newton-Raphson load flow
- LFcode = psspy.solved()
-
- #check to see if swing bus outside limits
- Plimit = False
- Qlimit = False
- for bus in buses_base:
- bustype = int(bus[6])
- if bustype==3: #swing bus
- swingbusnum = int(bus[0])
- for gen in plants_base:
- busnum = gen[0]
- if busnum == swingbusnum:
- machid = gen[2]
- pmax = gen[6]
- qmax = gen[7]
- pmin = gen[9]
- qmin = gen[10]
- ierr, pgen = psspy.macdat(busnum,machid,'P')
- ierr, qgen = psspy.macdat(busnum,machid,'Q')
- if pgen > pmax or pgen < pmin:
- Plimit = True
- if qgen > qmax or qgen < qmin:
- Qlimit = True
- psspy.save(NetworkFile)
- return EcdErrorCodes, LFcode, Plimit, Qlimit
-
-EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(NetworkFile, ecd_file, 0.026, 1)
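-# Reading the results (a short note): EcdErrorCodes collects the return codes
-# of the four psspy.ecdi() calls, LFcode is psspy.solved() after the
-# Newton-Raphson load flow (0 normally means the convergence tolerance was
-# met), and Plimit/Qlimit are True when the swing machine ends up outside its
-# active or reactive limits.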
+++ /dev/null
-import PSENconfig # file with Eficas output dictionaries
-import os,sys,pickle
-# from support_functionsPF import *#Valentin
-from support_functionsPF import read_pfd,np
-# sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])#Valentin
-# os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'#Valentin
-
-PFParams = PSENconfig.Dico['PF_PARAMETERS']
-
-import powerfactory
-
-app = powerfactory.GetApplication()
-# app.Show()
-user = app.GetCurrentUser()
-ComImp = user.CreateObject('ComPFDIMPORT')
-
-app.SetWriteCacheEnabled(1) # Disable consistency check
-ComImp.g_file = PSENconfig.Dico['DIRECTORY']['pfd_file']
-ComImp.g_target = user # project is imported under the user account
-err = ComImp.Execute() # Execute command starts the import process
-app.SetWriteCacheEnabled(0) # Enable consistency check
-if err:
- app.PrintError('Project could not be imported...')
- exit()
-prjs = user.GetContents('*.IntPrj')
-prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
-prj = prjs[0]
-app.ActivateProject(prj.loc_name)
-prj = app.GetActiveProject()
-studycase = app.GetActiveStudyCase()
-studycase.loc_name = 'BaseCase'
-app.PrintPlain('Project %s has been successfully imported.' % prj)
-ComImp.Delete()
-# stop = time.clock(); print('Imptor file first time ' + str(round(stop - start, 3)) + ' seconds'); start = stop;#++++++++++++++++
-#read sav
-all_inputs_init=read_pfd(app,prj.loc_name,recal=1)
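-# read_pfd returns one list per component class, indexed as used below:
-# 0 buses, 1 lines, 2 two-winding transformers, 3 plants, 4 loads,
-# 5 fixed shunts, 6 motors, 7 three-winding transformers, 8 switched shunts.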
-
-# all_inputs_base=read_pfd(Paths['pfd_file'])
-all_inputs_base=all_inputs_init
-buses_base = [bus[0:8] for bus in all_inputs_base[0]]
-lines_base = [bus[0:11] for bus in all_inputs_base[1]]
-trans_base = [bus[0:11] for bus in all_inputs_base[2]]
-plants_base = [bus[0:11] for bus in all_inputs_base[3]]
-loads_base = [bus[0:6] for bus in all_inputs_base[4]]
-shunt_base = [bus[0:6] for bus in all_inputs_base[5]]
-motors_base = [bus[0:6] for bus in all_inputs_base[6]]
-trans3_base = [bus[0:14] for bus in all_inputs_base[7]]
-swshunt_base = [bus[0:6] for bus in all_inputs_base[8]]
-########///////////////////////////////////////////////////////////##########
-filer=open('temp1.txt','r')
-_path=[]
-for line in filer:
- _path.append(line)
-filer.close()
-path_save = _path[0].replace('\n','')
-ldf = app.GetFromStudyCase('ComLdf')
-ldf.iopt_net = 0 # AC load flow
-ldf.iopt_at = 1 # automatic tap transfos
-ldf.iopt_asht = 1 # automatic shunt
-ldf.iopt_lim = 1 # reactive power limit
-ldf.iopt_plim = 1 # active power limit
-ldf.iopt_limScale = 1 # scale factor
-ldf.iopt_noinit = 1 # do not initialise the load flow
-ldf.iopt_initOPF = 0 # used for OPF initialisation (disabled here)
-ldf.iShowOutLoopMsg = 0 # switch off outer-loop messages
-ldf.iopt_show = 0 # switch off output
-ldf.iopt_check = 0 # switch off output
-ldf.iopt_chctr = 0 # switch off output
-
-####OPF Parametrisation
-opf = app.GetFromStudyCase('ComOpf')
-opf.iopt_ACDC = 0 # AC OPF without contingencies
-
-#OPF Controls
-#opf.iopt_pd = 1 # active power dispatch 1: YES
-#opf.iopt_qd = 1 # generator reactive power dispatch control 1: YES
-TapChange = 1 - int(PFParams['LOCK_TAPS']) # 0 if locked, 1 if stepping
-opf.iopt_trf = TapChange # transformer tap position 1: YES
-#opf.iopt_sht = 1 # switchable shunts 0: NO
-
-#OPF Constraints
-#opf.iopt_brnch = 1 # branch flow limit constraint 1: YES
-#opf.iopt_genP = 1 # generator active power limit constraint 1: YES
-#opf.iopt_genQ = 1 # generator reactive power limit constraint 1: YES
-#opf.iop_bus = 1 # busbar voltage constraint 0: NO
-#opf.iopt_add = 0 # boundary flow limit constraint: 0 NO
-
-opf.iInit = 0 # OPF initialisation
-opf.iitr = int(PFParams['ITERATION_INTERIOR']) # limit on the number of inner-loop iterations
-opf.iitr_outer = 30 # limit on the number of outer-loop iterations
-if PFParams['ALGORITHM'] == 'Optimum Power Flow':
-    if PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_COST':
-        opf.iopt_obj = 'cst' # objective function = minimisation of costs
-    elif PFParams['OBJECTIVE_FUNCTION'] == 'LOADSHEDDING_COSTS':
-        opf.iopt_obj = 'shd' # objective function = minimisation of load shedding cost
-    elif PFParams['OBJECTIVE_FUNCTION'] == 'MINIMISATION_OF_LOSSES':
-        opf.iopt_obj = 'los' # objective function = minimisation of total network losses
-    elif PFParams['OBJECTIVE_FUNCTION'] == 'MAXIMISATION_MVAR_RESERVE':
-        opf.iopt_obj = 'rpr' # objective function = maximisation of reactive power reserve
-
-    # create triggers
-    # preparation: delete the old characteristics
-    fOplib = app.GetProjectFolder('oplib') # folder containing the characteristics folder
-    fChar = app.GetProjectFolder('chars') # folder containing the triggers
-    if fChar is None:
- fChar = fOplib.GetChildren(1, 'Characteristics.IntPrjfolder', 1)
- if fChar == []:
- fChar = fOplib.CreateObject('IntPrjfolder', 'Characteristics')
- fChar.iopt_typ = 'chars'
- else:
- fChar = fChar[0]
- fChar.iopt_typ = 'chars'
- fScale = fChar.GetChildren(1, '*.IntScales')
- if fScale == []:
- fScale = fChar.CreateObject('IntScales')
- else:
- fScale = fScale[0]
- trifiles = fScale.GetChildren(1, '*.TriFile', 1)
- for trifile in trifiles:
- trifile.Delete()
- chavecs = fChar.GetChildren(1, '*.ChaVecFile', 1)
- for chavec in chavecs:
- chavec.Delete()
- fCase = app.GetActiveStudyCase()
- settriggers = fCase.GetChildren(1, '*.SetTrigger', 1)
- for settriger in settriggers:
- settriger.Delete()
-
-app.SaveAsScenario('Base', 1) # create a scenario to save the base case
-scenario_temporaire = app.GetActiveScenario()
-scenario_temporaire.Save()
-scenario_temporaire.Deactivate()
-
-ComExp = user.CreateObject('ComPfdExport')
-app.SetWriteCacheEnabled(1) # Disable consistency check
-ComExp.g_objects = [prj] # define the project to be exported
-ComExp.g_file = os.path.join(path_save, "BaseCase.pfd")
-err = ComExp.Execute() # Command starts the export process
-if err:
- app.PrintError('Project could not be exported...')
- exit()
-app.SetWriteCacheEnabled(0) # Enable consistency check
-# app.PrintPlain('Project %s has been successfully exported to BaseCase.' % prj)
-print(prj)
-print(prj.loc_name)
-ComExp.Delete()
-prj.Delete()
-
-# buses_base,lines_base,trans_base,plants_base,loads_base,shunt_base,motors_base,trans3_base,swshunt_base
-# save the result in a pickle file
-res_final=[buses_base,lines_base,trans_base,plants_base,loads_base,shunt_base,motors_base,trans3_base,swshunt_base]
-with open('param_base', 'wb') as fichier:
- mon_pickler = pickle.Pickler(fichier, protocol=2)
- mon_pickler.dump(res_final)
-# aa=1
\ No newline at end of file
+++ /dev/null
-##########################################
-# Purpose of this module: launch the parallel computation
-##########################################
-
-import time
-import PSENconfig # file with Eficas output dictionaries
-import os,sys,pickle
-# from support_functionsPF import *#Valentin
-from support_functionsPF import read_pfd,np
-from math import *
-import csv
-
-stop = time.clock(); start = stop;
-PFParams = PSENconfig.Dico['PF_PARAMETERS']
-with open(os.path.dirname(os.path.realpath(__file__))+'/data_dico', 'rb') as fichier:
- mon_depickler = pickle.Unpickler(fichier)
- dico = mon_depickler.load()
-x=dico['inputSamp']
-
-position = dico['position']
-# timeVect = dico['timeVect']
-LawsList = dico['CorrMatrix']['laws']
-N_1_LINES = dico['N_1_LINES']
-N_1_TRANSFORMERS = dico['N_1_TRANSFORMERS']
-N_1_MOTORS = dico['N_1_MOTORS']
-N_1_LOADS = dico['N_1_LOADS']
-N_1_GENERATORS = dico['N_1_GENERATORS']
-# inputSample = []
-# x_copy = []
-# #############################################################################/
-import powerfactory
-app = powerfactory.GetApplication()
-stop = time.clock(); print(' A0 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
-user = app.GetCurrentUser()
-if dico['position'] == 0:
- ComImp = user.CreateObject('ComPFDIMPORT')
- app.SetWriteCacheEnabled(1) # Disable consistency check
- ComImp.g_file = os.path.join(dico['doc_base'], 'BaseCase.pfd')
- ComImp.g_target = user # project is imported under the user account
- err = ComImp.Execute() # Execute command starts the import process
- ComImp.Delete()
- app.SetWriteCacheEnabled(0) # Enable consistency check
-prjs = user.GetContents('*.IntPrj')
-prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
-prj = prjs[0]
-prj.Activate()
-
-#############################################################################/
-fOplib = app.GetProjectFolder('oplib') # folder containing the characteristics folder
-fChar = app.GetProjectFolder('chars') # folder containing the triggers
-fScale = fChar.GetChildren(1, '*.IntScales')[0]
-fScen = app.GetProjectFolder('scen') # folder containing the operation scenarios
-studycase0 = prj.GetContents('BaseCase.IntCase', 1)[0] # app.GetActiveStudyCase()
-studycase0.Activate()
-scen = fScen.GetChildren(1, 'Base.IntScenario', 1)[0]
-scen.Activate()
-settrigger0 = studycase0.GetChildren(1, 'set_iteration.SetTrigger', 1)
-if settrigger0:
- settrigger0[0].outserv=1
-fold = studycase0.fold_id
-all_inputs_init = read_pfd(app, prj.loc_name, recal=1)
-scen.Deactivate()
-stop = time.clock(); print(' A1 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
-# print('read_pfd before loop ' + str(round(stop - start, 3)) + ' seconds');
-# start = stop; # ++++++++++++++++
-loads_base = all_inputs_init[4]
-plants_base = all_inputs_init[3]
-lines_base = all_inputs_init[1]
-transf_base = all_inputs_init[2]
-transf3_base = all_inputs_init[7]
-motors_base = all_inputs_init[6]
-
-## write the initial pgini values (before the trigger) to a csv file
-#initial_pginis = []
-#for plant in plants_base:
-# initial_pginis.append(plant[11].pgini)
-#
-#csvfile = os.path.join(dico['doc_base'], 'initial_pgini.csv')
-#g = open(csvfile,"wb")
-#cwt = csv.writer(g, delimiter=";")
-#for ipgini in initial_pginis:
-# cwt.writerow(ipgini)
-#g.close()
-
-
-
-trifiles = fScale.GetChildren(1, '*.TriFile', 1)
-stop = time.clock(); print(' A2 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
-# create the TriFile only once, at the first package
-if dico['position'] == 0:
- for trifile in trifiles:
- trifile.Delete()
- chavecs = fChar.GetChildren(1, '*.ChaVecFile', 1)
- for chavec in chavecs:
- chavec.Delete()
- settriggers = studycase0.GetChildren(1, '*.SetTrigger', 1)
- for settriger in settriggers:
- settriger.Delete()
- tri1 = fScale.CreateObject('TriFile')
- tri1.loc_name = 'set_iteration'
- tri1.iopt_time = 1
- tri1.unit = '1'
- settriger = studycase0.CreateObject('SetTrigger', 'set_iteration')
- # settriger= studycase0.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
- settriger.ptrigger = tri1
- effacers = studycase0.GetContents('*.ComPython', 0)
- for effacer in effacers:
- effacer.Delete()
- compython0 = studycase0.CreateObject('ComPython', 'comp0')
- compython0.filePath = os.path.dirname(os.path.realpath(__file__)) + '/comfile.py'
- effacers = fold.GetContents('*.Comtasks', 0)
- for effacer in effacers:
- effacer.Delete()
- comtask = fold.CreateObject('ComTasks')
-else:
- stop = time.clock();
- print(' A3 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');
- start = stop;
- tri1 = fScale.GetChildren(1, 'set_iteration.TriFile', 1)[0]
- stop = time.clock();
- print(' A4 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');
- start = stop;
- settriger = studycase0.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
- stop = time.clock();
- print(' A5 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');
- start = stop;
- comtask = fold.GetContents('*.Comtasks', 0)[0]
- comtask.Delete()
- stop = time.clock(); print(' A6 in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
- comtask = fold.CreateObject('ComTasks')
- # comtask.RemoveStudyCases()
-stop = time.clock(); print(' A7 in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
-lenlaw = len(x[0]) - 1 # number of laws
-nameN1 = [] # names of the N_1 elements
-for N1 in N_1_LINES:
- nameN1.append(N1)
-for N1 in N_1_TRANSFORMERS:
- nameN1.append(N1)
-for N1 in N_1_MOTORS:
- nameN1.append(N1)
-for N1 in N_1_LOADS:
- nameN1.append(N1)
-for N1 in N_1_GENERATORS:
- nameN1.append(N1)
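-# Column layout of data_trigger.csv, as implied by the datacol assignments
-# below: column 1 carries the trigger (iteration) value, columns 2..lenlaw+1
-# the sampled value of each law (datacol = i + 2), and the following columns
-# the 0/1 outage flags of the N-1 elements in nameN1 order
-# (datacol = lenlaw + i + 2).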
-
-charefs = prj.GetChildren(1, '*.ChaRef', 1)
-for charef in charefs:
- charef.Delete()
-stop = time.clock(); print(' Prepare chavecfile and characteristic in run_in_PFfunction.py in ' + str( round(stop - start, 3)) + ' seconds'); start = stop;
-    # Begin creating the ChaVecFiles and the characteristics
-for i, law in enumerate(LawsList):
- if law != 'N_1_fromFile':
- if dico['Laws'][law]['ComponentType'] == 'Generator' and 'Level' in dico['Laws'][law][
-            'Type']: # power level
- if dico['Laws'][law]['TransferFunction'] == True:
- if dico['Laws'][law]['TF_Input'] == '.pow file':
- z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
- pathWT = dico['Laws'][law]['File_Name']
- HH = dico['Laws'][law]['Hub_Height']
- alpha = dico['Laws'][law]['AlphaWS']
- PercentLoss = dico['Laws'][law]['Percent_Losses']
- x_copy[ite][i] = eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
- # x_copy[ite][i]=x[ite][i]
- elif dico['Laws'][law]['TF_Input'] == 'tuples list':
- x_copy[ite][i] = applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
- # else: # ensure values are between 0 and 1
- # Pval = x[ite][i]
- # Pval = min(Pval, 1)
- # Pval = max(Pval, 0)
- # x_copy[ite][i] = Pval
- ###################=======================================
- if dico['Laws'][law]['ComponentType'] == 'Load' and ('Unavailability' not in dico['Laws'][law]['Type']):
- LoadList = dico['Laws'][law]['Load']
-            for LoadName in LoadList: # several loads possible
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- for load in loads_base:
-                    if (load[0] == busNum) and (load[5] == ID): # create trigger
-                        chavec_1 = fChar.CreateObject('ChaVecFile', 'Load_' + LoadName)
-                        chavec_1.f_name = os.path.join(os.getcwd(),
-                                                       'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 1
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- load[6].plini = load[6].plini
- ref = load[6].CreateObject('charef', 'plini')
- ref.typ_id = chavec_1
-# refP = load[6].GetChildren(1, 'plini.Charef',1)
-# refP[0].outserv = 0
- ref = load[6].CreateObject('charef', 'qlini')
- ref.typ_id = chavec_1
-# refQ = load[6].GetChildren(1, 'qlini.Charef',1)
-# refQ[0].outserv = 0
- break
-
-
- # Motor Load Law: change the values of the different induction motor loads and treat large changes of load to help convergence
- # if dico['Laws'][law]['ComponentType']=='Motor' and ('N_1' not in law) and ('out' not in law.lower()):
- if dico['Laws'][law]['ComponentType'] == 'Motor' and ('Unavailability' not in dico['Laws'][law]['Type']):
- MotorList = dico['Laws'][law]['Motor']
-            # if x_copy[ite][i] > 0.75: # change the load(s) directly
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Pmax = dico['Motors'][MotorName]['PMAX']
- for motor in motors_base:
-                    if (motor[0] == busNum) and (motor[5] == ID): # create trigger
-                        chavec_1 = fChar.CreateObject('ChaVecFile', 'Mo_' + MotorName)
-                        chavec_1.f_name = os.path.join(os.getcwd(),
-                                                       'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 1
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- motor[6].pgini = Pmax
- ref = motor[6].CreateObject('charef', 'pgini')
- ref.typ_id = chavec_1
- break
-
- # Generator Law : Change generation level
- # if dico['Laws'][law]['ComponentType']=='Generator' and ('N_1' not in law) and ('out' not in law.lower()):
- if dico['Laws'][law]['ComponentType'] == 'Generator' and ('Unavailability' not in dico['Laws'][law]['Type']):
- GenList = dico['Laws'][law]['Generator']
- for GenName in GenList:
- busNum = dico['Generators'][GenName]['NUMBER']
- ID = dico['Generators'][GenName]['ID']
- Pmax = dico['Generators'][GenName]['PMAX']
- # Pmin = dico['Generators'][GenName]['PMIN']
- for plant in plants_base:
-                    if (plant[0] == busNum) and (plant[2] == ID): # create trigger
-                        chavec_1 = fChar.CreateObject('ChaVecFile', 'Gen_' + GenName)
-                        chavec_1.f_name = os.path.join(os.getcwd(),
-                                                       'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 1
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- plant[11].pgini = Pmax
-# ref = plant[11].CreateObject('charef', 'pgini')
- ref = plant[11].CreateObject('charef', 'pgini') # CM
- ref.typ_id = chavec_1
- ref = plant[11].CreateObject('charef', 'qgini')
- ref.typ_id = chavec_1
- break
-
- # Line or Transformer Unavailability Law: disconnect component if sample=0
- elif dico['Laws'][law]['ComponentType'] == 'Line' or dico['Laws'][law][
- 'ComponentType'] == 'Transformer':
- compType = dico['Laws'][law]['ComponentType']
- CompList = dico['Laws'][law][compType]
- for Name in CompList:
- from_bus = dico[compType + 's'][Name]['FROMNUMBER']
- to_bus = dico[compType + 's'][Name]['TONUMBER']
- ID = dico[compType + 's'][Name]['ID']
-                if compType == 'Line': # disconnect the line
-                    for line in lines_base:
-                        if (from_bus == line[0]) and (to_bus == line[1]) and (line[10] == ID): # create trigger
-                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Line_' + Name)
-                            chavec_1.f_name = os.path.join(os.getcwd(),
-                                                           'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- line[11].outserv = line[11].outserv
- ref = line[11].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-                elif compType == 'Transformer': # disconnect a 2-winding transformer
- if dico[compType + 's'][Name]['#WIND'] == 2:
- for tranf in transf_base:
- if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (tranf[10] == ID):
- chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf_' + Name)
- chavec_1.f_name = os.path.join(os.getcwd(),
-                                                               'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- tranf[11].outserv = tranf[11].outserv
- ref = tranf[11].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-                    elif dico[compType + 's'][Name]['#WIND'] == 3: # disconnect a 3-winding transformer
- three_bus = dico[compType + 's'][Name]['3NUMBER']
- for tranf in transf3_base:
- if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (three_bus == tranf[2]) and (
- tranf[13] == ID):
- chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf3_' + Name)
- chavec_1.f_name = os.path.join(os.getcwd(),
-                                                               'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- tranf[14].outserv = tranf[14].outserv
- ref = tranf[14].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
- # x2.append(x_copy[ite][i]) # store values sampled for logger function
-
- elif (dico['Laws'][law]['ComponentType'] == 'Generator' and (
- 'Unavailability' in dico['Laws'][law]['Type'])) or \
- (dico['Laws'][law]['ComponentType'] == 'Load' and (
- 'Unavailability' in dico['Laws'][law]['Type'])) or \
- (dico['Laws'][law]['ComponentType'] == 'Motor' and (
- 'Unavailability' in dico['Laws'][law]['Type'])):
- compType = dico['Laws'][law]['ComponentType']
- CompList = dico['Laws'][law][compType]
-
- for Name in CompList:
- busNum = dico[compType + 's'][Name]['NUMBER']
- ID = dico[compType + 's'][Name]['ID']
- if compType == 'Generator':
- for plant in plants_base:
-                        if (plant[0] == busNum) and (plant[2] == ID): # create trigger
-                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Gen_' + Name)
-                            chavec_1.f_name = os.path.join(os.getcwd(),
-                                                           'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- plant[11].outserv = plant[11].outserv
- ref = plant[11].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
- elif compType == 'Load':
- for load in loads_base:
-                        if (load[0] == busNum) and (load[5] == ID): # create trigger
-                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Load_' + Name)
-                            chavec_1.f_name = os.path.join(os.getcwd(),
-                                                           'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- load[6].outserv = load[6].outserv
- ref = load[6].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
- elif compType == 'Motor':
- for motor in motors_base:
-                        if (motor[0] == busNum) and (motor[5] == ID): # create trigger
-                            chavec_1 = fChar.CreateObject('ChaVecFile', 'Mo_' + Name)
-                            chavec_1.f_name = os.path.join(os.getcwd(),
-                                                           'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- chavec_1.datacol = i + 2
- chavec_1.scale = tri1
- motor[6].outserv = motor[6].outserv
- ref = motor[6].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-    else: # law == 'N_1_fromFile'
- for line_name in N_1_LINES:
- from_bus = dico['Lines'][line_name]['FROMNUMBER']
- to_bus = dico['Lines'][line_name]['TONUMBER']
- ID = dico['Lines'][line_name]['ID']
- for line in lines_base:
-                if (from_bus == line[0]) and (to_bus == line[1]) and (line[10] == ID): # create trigger
-                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Line_' + line_name)
-                    chavec_1.f_name = os.path.join(os.getcwd(),
-                                                   'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- for i, name in enumerate(nameN1):
- if line_name == name:
- chavec_1.datacol = lenlaw + i + 2
- break
- chavec_1.scale = tri1
- outs = line[11].GetChildren(1, 'outserv.Charef', 1)
- for out in outs:
-                        out.Delete() # if a TriFile already exists, delete it: N_1_fromFile takes priority
- line[11].outserv = line[11].outserv
- ref = line[11].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
- for transfo_name in N_1_TRANSFORMERS:
- from_bus = dico['Transformers'][transfo_name]['FROMNUMBER']
- to_bus = dico['Transformers'][transfo_name]['TONUMBER']
- ID = dico['Transformers'][transfo_name]['ID']
- if dico['Transformers'][transfo_name]['#WIND'] == 2:
- for tranf in transf_base:
- if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (tranf[10] == ID):
- chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf_' + transfo_name)
- chavec_1.f_name = os.path.join(os.getcwd(),
-                                                       'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- for i, name in enumerate(nameN1):
- if transfo_name == name:
- chavec_1.datacol = lenlaw + i + 2
- break
- chavec_1.scale = tri1
- outs = tranf[11].GetChildren(1, 'outserv.Charef', 1)
- for out in outs:
-                            out.Delete() # if a TriFile already exists, delete it: N_1_fromFile takes priority
- tranf[11].outserv = tranf[11].outserv
- ref = tranf[11].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
-            elif dico['Transformers'][transfo_name]['#WIND'] == 3: # disconnect a 3-winding transformer
- three_bus = dico['Transformers'][transfo_name]['3NUMBER']
- for tranf in transf3_base:
- if (from_bus == tranf[0]) and (to_bus == tranf[1]) and (three_bus == tranf[2]) and (
- tranf[13] == ID):
- chavec_1 = fChar.CreateObject('ChaVecFile', 'Transf_' + transfo_name)
- chavec_1.f_name = os.path.join(os.getcwd(),
-                                                       'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- for i, name in enumerate(nameN1):
- if transfo_name == name:
- chavec_1.datacol = lenlaw + i + 2
- break
- chavec_1.scale = tri1
- outs = tranf[14].GetChildren(1, 'outserv.Charef', 1)
- for out in outs:
- out.Delete()
- tranf[14].outserv = tranf[14].outserv
- ref = tranf[14].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
- for motor_name in N_1_MOTORS:
- busNum = dico['Motors'][motor_name]['NUMBER']
- ID = dico['Motors'][motor_name]['ID']
-
- for motor in motors_base:
-                if (motor[0] == busNum) and (motor[5] == ID): # create trigger
-                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Mo_' + motor_name)
-                    chavec_1.f_name = os.path.join(os.getcwd(),
-                                                   'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- for i, name in enumerate(nameN1):
- if motor_name == name:
- chavec_1.datacol = lenlaw + i + 2
- break
- chavec_1.scale = tri1
- outs = motor[6].GetChildren(1, 'outserv.Charef', 1)
- for out in outs:
-                        out.Delete() # if a TriFile already exists, delete it: N_1_fromFile takes priority
- motor[6].outserv = motor[6].outserv
- ref = motor[6].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
- for load_name in N_1_LOADS:
- busNum = dico['Loads'][load_name]['NUMBER']
- ID = dico['Loads'][load_name]['ID']
- for load in loads_base:
-                if (load[0] == busNum) and (load[5] == ID): # create trigger
-                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Load_' + load_name)
-                    chavec_1.f_name = os.path.join(os.getcwd(),
-                                                   'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- for i, name in enumerate(nameN1):
- if load_name == name:
- chavec_1.datacol = lenlaw + i + 2
- break
- chavec_1.scale = tri1
- outs = load[6].GetChildren(1, 'outserv.Charef', 1)
- for out in outs:
- out.Delete()
- load[6].outserv = load[6].outserv
- ref = load[6].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
- for group_name in N_1_GENERATORS:
- busNum = dico['Generators'][group_name]['NUMBER']
- ID = dico['Generators'][group_name]['ID']
- for plant in plants_base:
-                if (plant[0] == busNum) and (plant[2] == ID): # create trigger
-                    chavec_1 = fChar.CreateObject('ChaVecFile', 'Gen_' + group_name)
-                    chavec_1.f_name = os.path.join(os.getcwd(),
-                                                   'data_trigger.csv') # .csv file of the characteristic
- chavec_1.usage = 2
- for i,name in enumerate(nameN1):
- if group_name == name:
- chavec_1.datacol = lenlaw + i + 2
- break
- chavec_1.scale = tri1
- outs = plant[11].GetChildren(1, 'outserv.Charef', 1)
- for out in outs:
- out.Delete()
- plant[11].outserv = plant[11].outserv
- ref = plant[11].CreateObject('charef', 'outserv')
- ref.typ_id = chavec_1
- break
-
-
- # chemin=os.getcwd()
-stop = time.clock(); print(' Prepare chavec for N_1_fromfile in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
-print('======= BEGIN copy studycases'+' ==================')
-if settrigger0:
- settrigger0[0].outserv=0
-for ite in range(len(x)):
- # inputSample.append(np.array(x[ite]))
- studycase = fold.AddCopy(studycase0, 'Case_'+str(position))
- settriger_iter = studycase.GetChildren(1, 'set_iteration.SetTrigger', 1)[0]
- settriger_iter.ftrigger = position
- compy = studycase.GetContents('*.ComPython', 0)[0]
- comtask.AppendStudyCase(studycase)
- comtask.AppendCommand(compy)
- position+=1
-if settrigger0:
- settrigger0[0].outserv=1
-stop = time.clock();print(' Copy study case in run_in_PFfunction.py in ' + str(round(stop - start, 3)) + ' seconds');start = stop;
-err=comtask.Execute()
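-# comtask.Execute() runs every study case appended above; each case executes
-# its ComPython command (comfile.py), which is how the parallel computation
-# announced in the module header is actually carried out.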
-
-# app.Show()
-aa=1
\ No newline at end of file
+++ /dev/null
-from support_functions import *
-
-##import os,sys,random,string
-##import PSENconfig
-##sys.path.append(PSENconfig.Dico['DIRECTORY']['PSSPY_path'])
-##os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PSSE_path'] + ";"+ os.environ['PATH']
-##import psspy
-
-ropfile = r'D:\DEWA Solar 2017\2018 DEWA peak_fullGCCIA.rop'
-savfile = r'D:\DEWA Solar 2017\2018 DEWA peak_fullGCCIA.sav'
-savfile2 = r'D:\DEWA Solar 2017\2018 DEWA peak_fullGCCIA2.sav'
-GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(ropfile)
-
-
-_i=psspy.getdefaultint()
-_f=psspy.getdefaultreal()
-_s=psspy.getdefaultchar()
-redirect.psse2py()
-#import pssdb
-psspy.psseinit(80000)
-
-# Silent execution of PSSe
-islct=6 # 6=no output; 1=standard
-psspy.progress_output(islct)
-
-psspy.case(savfile)
-
-NoDisconnectionAllowedTotal = []
-for res in PeriodReserveData:
- ResNum = res[0]
- ResLevel = res[1]
- ResPeriod = res[2]
- InService = res[3]
- if InService == 0:
- continue
- ParticipatingUnits = res[4]
- ParticipatingUnitsFull = []
- NoDisconnectionAllowed = []
- for unit in ParticipatingUnits:
- busNum = unit[0]
- ID = unit[1]
-
- for gen in GenReserveData:
- busNum2 = gen[0]
- ID2 = gen[1]
- if busNum==busNum2 and ID == ID2:
- ramp =gen[2]
- Pmax = gen[3]
- break
-
- for gen in GenDispatchData:
- busNum3 = gen[0]
- ID3 = gen[1]
- if busNum==busNum3 and ID == ID3:
- dispatch = gen[2]
- dispTable = gen[3]
- break
-
- for dTable in DispTableData:
- dispTable2 = dTable[0]
- if dispTable == dispTable2:
- PmaxTable = dTable[1]
- PminTable = dTable[2]
- FuelCostScaleCoef = dTable[3]
-                CurveType = dTable[4] # 2 = piecewise linear
- Status = dTable[5]
- CostTable = dTable[6]
- break
-
- for table in LinCostTables:
- CostTable2 = table[0]
- if CostTable2==CostTable:
- numpoints = table[1]
- points = table[2]
- break
-
- MaxContribution = min(ResPeriod * ramp, Pmax)
-
- for i,[x,y] in enumerate(points):
- if x > Pmax:
- x1 = x
- y1 = y
- x0 = points[i-1][0]
- y0 = points[i-1][1]
- break
- y_i = (y1 - y0)*Pmax/(x1-x0)
-
- CostCoef = y_i / Pmax
-
- ParticipatingUnitsFull.append([busNum, ID, Pmax, dispTable, MaxContribution, CostCoef])
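-    # CostCoef = y_i / Pmax reduces to the slope (y1 - y0) / (x1 - x0) of the
-    # first cost segment beyond Pmax, i.e. an estimate of the unit's marginal
-    # cost; the units are sorted below by this coefficient, cheapest first.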
-
- ParticipatingUnitsFull.sort(key=lambda x: x[-1], reverse=False)
- ReserveCapability = 0
-
- for unit in ParticipatingUnitsFull:
- MaxContribution = unit[4]
- if ReserveCapability >= ResLevel:
- break
- else:
- ReserveCapability += MaxContribution
- dispTable = unit[3]
- Pmax = unit[2]
- busNum = unit[0]
- ID = unit[1]
- NoDisconnectionAllowed.append([busNum, ID])
- Pmin = 0.12*Pmax
- psspy.opf_apdsp_tbl(dispTable,[_i,_i,_i],[_f, Pmin,_f])
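-            # Raise Pmin of this needed unit to 12% of its Pmax in the dispatch
-            # table so that the unit commitment step cannot switch it off (this
-            # mirrors the ReserveCorrection note in support_functions).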
-
- for grp in NoDisconnectionAllowed:
- if grp not in NoDisconnectionAllowedTotal:
- NoDisconnectionAllowedTotal.append(grp)
-
- psspy.save(savfile2)
-
-
-
+++ /dev/null
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Jun 03 15:31:42 2013
-
-@author: B31272
-
-Support functions
-"""
-import os,sys,random,string
-import PSENconfig
-sys.path.append(PSENconfig.Dico['DIRECTORY']['PSSPY_path'])
-os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PSSE_path'] + ";"+ os.environ['PATH']
-import psspy
-import pssarrays
-import redirect
-
-
-import pdb
-import numpy as np
-from math import *
-from decimal import *
-from openturns import *
-from time import sleep, strftime, gmtime
-import multiprocessing
-from threading import Thread
-from Queue import Queue, Empty
-
-Debug = False #debug mode for PSSEFunct
-Disconnect_RES = False #disconnect renewable generators when generate 0 MW
-DEWA_PV_Qlimits = True #lower Q limits when P of renewable generators is < 0.2 Pmax
-ReserveCorrection = True #add Pmin to units that are necessary to satisfy reserve requirements so that they are not disconnected after unit commitment
-DisconnectThreshhold = 0.10 # threshold, in per unit of active power, below which generators are disconnected so that they participate neither in the reserve nor in reactive compensation
-#===============================================================================
-# DEFINITION DES FONCTIONS - CREATION OF THE FUNCTIONS
-#===============================================================================
-
-
-#read a ROP file containing all opf data
-def readOPFdata(RopFile):
-
- fo = open(RopFile, 'r')
- Lines = fo.readlines()
- fo.close()
-
- for i,line in enumerate(Lines):
- if 'begin Generator Dispatch data' in line:
- startgendisp = i+1
- if 'End of Generator Dispatch data' in line:
- endgendisp = i
- if 'begin Active Power Dispatch Tables' in line:
- startdisptable = i+1
- if 'End of Active Power Dispatch Tables' in line:
- enddisptable = i
- if 'begin Piece-wise Linear Cost Tables' in line:
- startlincosttable = i+1
- if 'End of Piece-wise Linear Cost Tables' in line:
- endlincosttable = i
- if 'begin Piece-wise Quadratic Cost Tables' in line:
- startquadcosttable = i+1
- if 'End of Piece-wise Quadratic Cost Tables' in line:
- endquadcosttable = i
- if 'begin Polynomial Cost Tables' in line:
- startpolycosttable = i+1
- if 'End of Polynomial Cost Tables' in line:
- endpolycosttable = i
- if 'begin Generation Reserve data' in line:
- startgenreservedata = i+1
- if 'End of Generation Reserve data' in line:
- endgenreservedata = i
- if 'begin Period Reserve data' in line:
- startperiodreservedata = i+1
- if 'end of period reserve' in line.lower():
- endperiodreservedata = i
- if 'begin Adjustable Bus Shunt data' in line:
- startadjbusshuntdata = i+1
- if 'End of Adjustable Bus Shunt data' in line:
- endadjbusshuntdata = i
- if 'begin Adjustable Bus Load Tables' in line:
- startadjloadtable = i+1
- if 'End of Adjustable Bus Load Tables' in line:
- endadjloadtable = i
-
-
- GenDispatchData = []
- for i in range(startgendisp,endgendisp):
- data = Lines[i].split()
- busNum = int(data[0])
- ID = data[1]
- dispatch = float(data[2])
- dispTable = int(data[3])
- GenDispatchData.append([busNum,ID,dispatch, dispTable])
-
- DispTableData = []
- for i in range(startdisptable,enddisptable):
- data = Lines[i].split()
- DispTable = int(data[0])
- Pmax = float(data[1])
- Pmin = float(data[2])
- FuelCostScaleCoef = float(data[3])
- CurveType = int(data[4]) #2 = piecewise linear
- Status = int(data[5])
- CostTable = int(data[6])
- DispTableData.append([DispTable,Pmax,Pmin,FuelCostScaleCoef,CurveType,Status,CostTable])
-
- LinCostTables = []
- i = startlincosttable
- while i >= startlincosttable and i < endlincosttable:
- headerdata = Lines[i].split()
- CostTable = int(headerdata[0])
- #tableName = str(headerdata[1])
- numpoints = int(headerdata[-1])
- points=[]
- i+=1
- for k in range(numpoints):
- #pdb.set_trace()
- pointdata = Lines[i+k].split()
- x =float(pointdata[0])
- y =float(pointdata[1])
- points.append([x,y])
- i+=numpoints
- LinCostTables.append([CostTable, numpoints, points])
-
- QuadCostTables = [] #quadratic and polynomial cost tables are not parsed by this reader and are returned empty
- PolyCostTables = []
-
- GenReserveData = []
- for i in range(startgenreservedata,endgenreservedata):
- data = Lines[i].split()
- busNum = int(data[0])
- ID = data[1]
- ramp =float(data[2])
- Pmax = float(data[3])
- GenReserveData.append([busNum, ID, ramp, Pmax])
-
- PeriodReserveData = []
- for i in range(startperiodreservedata,endperiodreservedata):
- data = Lines[i].split()
- if len(data)==4:
- ResNum = int(data[0])
- ResLevel = float(data[1])
- ResPeriod = float(data[2])
- InService = int(data[3])
- ParticipatingUnits = []
- elif len(data)==2:
- busNum = int(data[0])
- ID = data[1]
- ParticipatingUnits.append([busNum,ID])
- elif 'End of Participating Reserve Units' in Lines[i]:
- PeriodReserveData.append([ResNum,ResLevel,ResPeriod,InService,ParticipatingUnits])
- else:
- pass
-
- AdjBusShuntData = []
- for i in range(startadjbusshuntdata,endadjbusshuntdata):
- data = Lines[i].split()
- busNum = int(data[0])
- ID = data[1]
- SuscInit = float(data[2])
- SuscMax = float(data[3])
- SuscMin = float(data[4])
- CostScale = float(data[5])
- InService = int(data[6])
- AdjBusShuntData.append([busNum,ID, SuscInit,SuscMax,SuscMin,CostScale,InService])
-
- AdjLoadTables = []
- for i in range(startadjloadtable,endadjloadtable):
- data = Lines[i].split()
- tableNum = int(data[0])
- LoadMult = float(data[1])
- Max = float(data[2])
- Min = float(data[3])
- CostScale = float(data[7])
- InService = int(data[9])
- AdjLoadTables.append([tableNum,LoadMult,Max,Min,CostScale,InService])
-
- return GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData, AdjBusShuntData, AdjLoadTables
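-
-## Hedged usage sketch (hypothetical .rop path, not from the original study):
-## (GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables,
-## GenReserveData, PeriodReserveData, AdjBusShuntData, AdjLoadTables) = readOPFdata(r'C:\PSEN\Example\BaseCase.rop')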
-
-
-#to remove a list from a string "['wind 1', 'wind 2', 'charge']" --> ['wind 1', 'wind 2', 'charge']
-def RemoveListfromString(List):
- List = List.replace("]","")
- List = List.replace("[","")
- List = List.replace(")","")
- List = List.replace("(","")
- List = List.replace("'","")
- List = List.replace('"',"")
- List = List.replace(" ","")
- List = List.split(",")
- return List
-
-def RemoveTuplesfromString(TList):
- TL = RemoveListfromString(TList)
- nTL = []
- for i in range(len(TL)/2):
- nTL.append([TL[2*i],float(TL[2*i+1])])
- return nTL
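-
-## Hedged usage sketch (illustrative strings, not from the original study):
-## RemoveListfromString("['wind 1', 'wind 2', 'charge']") --> ['wind1', 'wind2', 'charge'] (embedded spaces are stripped)
-## RemoveTuplesfromString("[('G1', 0.4), ('G2', 0.6)]") --> [['G1', 0.4], ['G2', 0.6]]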
-
-###Wind-speed-to-power transfer function of a wind turbine
-##def eol_old(wind, WTconfig):
-## Vcin = WTconfig ['cutinWS']
-## Vrate = WTconfig ['ratedWS']
-## Vcout = WTconfig ['cutoutWS']
-## Rho = WTconfig ['rho']
-## lossrate = WTconfig ['lossrate']
-## if wind <= Vcin :
-## Pnorm=0
-## elif wind < Vrate :
-## Pnorm=wind*(1-lossrate)#((wind**2-Vcin**2)/(Vrate**2-Vcin**2)*Rho/1.225*(1-lossrate))
-## elif wind < Vcout :
-## Pnorm = 1*(1-lossrate)
-## else :
-## Pnorm=0
-## return Pnorm
-
-def applyTF(x_in, TF):
-
- X = []
- P = []
- for (x,p) in TF:
- X.append(x)
- P.append(p)
-
-
- Pmax=max(P)
- precision = 0.001
- #calculate precision of values entered
- for i in range(len(X)):
- d1 = Decimal(str(X[i]))
- d2 = Decimal(str(P[i]))
- d1expo = d1.as_tuple().exponent
- d2expo = d2.as_tuple().exponent
- expo = np.minimum(d1expo,d2expo)
- precision = min(10**(expo-1),precision)
-
-
- #change to array type for consistency
- X = np.array(X)
- P = np.array(P)
-
- #interpolate between values so that precise wind speed data doesn't produce heavily discretized power levels
- from scipy import interpolate
- finterp = interpolate.interp1d(X,P, kind='linear')
- Xmin = min(X)
- Xmax = max(X)
- Xnew = np.arange(Xmin,Xmax,precision)
- Pnew = finterp(Xnew)
-
- #calculate power by applying transfer function
- if x_in >= Xmax-precision:
- index = len(Pnew)-1
- elif x_in <= Xmin + precision:
- index = 0
- else:
- index = int(round((x_in-Xmin)/precision))
- Power = Pnew[index]
-
- PowerNorm = Power/Pmax #normalize
-
- return PowerNorm
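-
-## Hedged usage sketch (illustrative transfer function, not from the original study):
-## TF = [(0.0, 0.0), (0.25, 10.0), (0.75, 90.0), (1.0, 100.0)] #(input, power) pairs
-## applyTF(0.5, TF) --> 0.5, i.e. 50.0 interpolated from the curve, normalized by Pmax = 100.0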
-
-
-
-def eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss = 5):
-
- '''
-
- Reconstitute wind production from wind speed histories for a single site.
-
- syntax:
- ACPowerNorm = eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss=5)
-
- inputs:
- WS: numpy array of wind speed measurements to be converted to production values
- z_WS: height, in meters above ground level, of the wind speed measurements
- pathWT: location of the selected wind turbine technology's power curve (.pow) file in the computer file system
- HH: wind turbine hub height, in meters
- alpha (optional, default = 1/7): exponential factor describing the vertical wind profile; used to extrapolate
- wind data to hub height. Can be a scalar or a vector with the same length as the wind data.
- PercentLoss (optional, default = 5): percent loss due to multiple effects: the wake effect of adjacent wind turbines,
- cable resistance between the wind turbine/farm and the injection point, grid and turbine unavailability, extreme weather conditions, etc.
-
- outputs:
- ACPowerNorm: numpy array of normalized expected wind production for the given wind farm.
-
- '''
-
-
- #open and treat wind turbine data in .pow file
- f = open(pathWT)
- lines = f.readlines()
- WTdata = {}
- WTdata["model"] = lines[0][1:-2]
- WTdata['diameter'] = float(lines[1][1:-2])
- WTdata['CutInWindSpeed'] = float(lines[4][1:-2])
- WTdata['CutOutWindSpeed'] = float(lines[3][1:-2])
- WTdata['PowerCurve'] = {}
- WTdata['PowerCurve']['WindSpeed'] = np.arange(0, 31)
- WTdata['PowerCurve']['Power'] = [float(0)] #in kW
- for i in range(5,35):
- WTdata['PowerCurve']['Power'].append(float(lines[i][1:-2]))
-
- WTdata['Pmax']=max(WTdata['PowerCurve']['Power'])
-
- #insert WT hub height
- WTdata['z'] = HH
-
- #correct wind speed values for appropriate height
- WS_hh = WS*(WTdata['z']/z_WS)**alpha #wind speed at hub height
-
- #calculate precision of cut in and cut out windspeeds
- d1 = Decimal(str(WTdata['CutInWindSpeed']))
- d2 = Decimal(str(WTdata['CutOutWindSpeed']))
- expo = np.minimum(d1.as_tuple().exponent, d2.as_tuple().exponent)
- precision = 10**(expo-1)
-
- #insert points for cut-in and cut-out wind speeds
- add_ci = 0
- add_co= 0
- if np.mod(WTdata['CutInWindSpeed'],1)==0:
- add_ci = precision
- if np.mod(WTdata['CutOutWindSpeed'],1)==0:
- add_co = precision
- i_cutin = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutInWindSpeed']+add_ci))[0][0]
- i_cutout = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutOutWindSpeed']+add_co))[0][0] + 1 #+1 to account for addition of cut in point
- WTdata['PowerCurve']['WindSpeed'] = list(WTdata['PowerCurve']['WindSpeed'])
- WTdata['PowerCurve']['WindSpeed'].insert(i_cutin, WTdata['CutInWindSpeed']+add_ci)
- WTdata['PowerCurve']['WindSpeed'].insert(i_cutout, WTdata['CutOutWindSpeed']+add_co)
- WTdata['PowerCurve']['Power'].insert(i_cutin, 0)
- WTdata['PowerCurve']['Power'].insert(i_cutout, 0)
-
- #change to array type for consistency
- WTdata['PowerCurve']['WindSpeed'] = np.array(WTdata['PowerCurve']['WindSpeed'])
- WTdata['PowerCurve']['Power'] = np.array(WTdata['PowerCurve']['Power'])
-
- #interpolate between values so that precise wind speed data doesn't produce heavily discretized power levels
- from scipy import interpolate
- finterp = interpolate.interp1d(WTdata['PowerCurve']['WindSpeed'],WTdata['PowerCurve']['Power'], kind='linear')
- Vnew = np.arange(0,30,precision)
- Pnew = finterp(Vnew)
-
- #calculate power produced by turbine in function of wind speed
- Pac_turbine = []
- for i, ws in enumerate(WS_hh):
- if ws >= 30-precision:
- index = len(Pnew)-1
- else:
- index = int(round(ws/precision)) #index of correct wind speed
- Pac_turbine.append(Pnew[index]) #Power corresponds to value with same index as wind speed vector
- Pac_turbine = np.array(Pac_turbine)
-
- #account for Losses...currently a single loss factor but could imagine implementing a per-point method
- #WakeEffects = 4 #3-8% for a typical farm, 0% for an individual windmill
- #CableResistanceLosses = 1 #1-3% between windmills and electric counter, depending on voltage levels and cable length
- #GridUnavalability = 1
- #WTUnavailability = 3
- #AcousticCurtailment = 1-4
- #Freezing = 0.5
- #LF = (1-WakeEffects/100)*(1-CableResistanceLosses/100) #loss factor
- ACPower = Pac_turbine*(1-PercentLoss/100) #total AC power produced by wind turbine
- ACPowerNorm = ACPower/WTdata['Pmax']
- return ACPowerNorm
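-
-## Hedged usage sketch (hypothetical .pow path and illustrative measurements):
-## WS = np.array([3.5, 7.2, 11.8]) #wind speeds measured at 10 m above ground
-## Pnorm = eol(WS, 10., r'C:\PSEN\WT\ExampleTurbine.pow', HH=80.)
-## Pnorm is then a numpy array of per-unit productions in [0, 1]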
-
-def postOPFinitialization(sav_file,all_inputs_init,AdjLoadTables,init_gen=True,init_bus=True,init_fxshnt=True,init_swshnt=True,init_load=True,init_P0=False):
-
- psspy.save(sav_file)
-
- buses_init=all_inputs_init[0]
- lines_init=all_inputs_init[1]
- trans_init=all_inputs_init[2]
- plants_init=all_inputs_init[3]
- loads_init=all_inputs_init[4]
- shunt_init=all_inputs_init[5]
- motors_init=all_inputs_init[6]
- trans3_init=all_inputs_init[7]
- swshunt_init=all_inputs_init[8]
-
- all_inputs_base=read_sav(sav_file)
- buses_base=all_inputs_base[0]
- lines_base=all_inputs_base[1]
- trans_base=all_inputs_base[2]
- plants_base=all_inputs_base[3]
- loads_base=all_inputs_base[4]
- shunt_base=all_inputs_base[5]
- motors_base=all_inputs_base[6]
- trans3_base=all_inputs_base[7]
- swshunt_base=all_inputs_base[8]
-
- _i=psspy.getdefaultint()
- _f=psspy.getdefaultreal()
- _s=psspy.getdefaultchar()
-
- #re-initialize generators to original value
- if init_gen:
- for plant in plants_init:
- busNum = plant[0]
- ID = plant[2]
- P = plant[3]
- Q = plant[4]
- psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
- [P, Q,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
-
- #re-initialize voltages and angles
- if init_bus:
- for bus in buses_init:
- busNum = bus[0]
- Upu = bus[2]
- angleDegrees = bus[7]
- psspy.bus_chng_3(busNum,[_i,_i,_i,_i],[_f,Upu,angleDegrees,_f,_f,_f,_f],_s)
-
- #re-initialize fixed shunts to original value
- if init_fxshnt:
- for shunt in shunt_base:
- sh_init = next((sh for sh in shunt_init if (sh[0] == shunt[0]) and sh[5]==shunt[5]),'not found')
- if sh_init == 'not found': #this means the added shunt is an adjustable bus shunt with no existing shunt at the same bus
- #initialize the fixed shunt to 0
- ID = shunt[5] #(ID always == 1)
- busNum = shunt[0]
- Qnom = 0
- psspy.shunt_chng(busNum,ID,_i,[_f, Qnom])
- else: #this shunt already existed in initial saved case file, re-initialize to initial value
- ID = sh_init[5]
- busNum = sh_init[0]
- Q = sh_init[2]
- Qnom = sh_init[4]
- psspy.shunt_chng(busNum,ID,_i,[_f, Qnom])
-
- #re-initialize switched shunts to original values
- if init_swshnt:
- for swshunt in swshunt_init:
- busNum = swshunt[0]
- Q = swshunt[2]
- Qnom = swshunt[4]
- psspy.switched_shunt_chng_3(busNum,[_i,_i,_i,_i,_i,_i,_i,_i,_i,_i,_i,_i],[_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,Qnom,_f],"")
-
- #re-initialize loads to original values
- if init_load:
- # for load in loads_init:
- # busNum = load[0]
- # ID = load[5]
- # P = load[1]
- # Q = load[2]
- # psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[P, Q,_f,_f,_f,_f])
- for table in AdjLoadTables:
- tableNum = table[0]
- LoadMult = table[1]
- psspy.opf_adjload_tbl(tableNum,[_i,_i,_i],[LoadMult,_f,_f,_f,_f,_f,_f])
-
- #initialize dispatchable generators to P = 0
- if init_P0:
- for gen in GenDispatchData: #NB: GenDispatchData is not an argument of this function; it must be available from the enclosing scope (e.g. as returned by readOPFdata)
- busNum = gen[0]
- ID = gen[1]
- dispatch = gen[2]
- if dispatch>0:
- P=0
- psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
- [P,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
-
- ##save changes
- psspy.save(sav_file)
-
- return
-
-
-#Function to read the data of interest from the case and store it in matrices
-def read_sav(doc):
- psspy.case(doc)
- # Select what to report
- if psspy.bsysisdef(0):
- sid = 0
- else: # Select subsystem with all buses
- sid = -1
-
- flag_bus = 2 #2 = all buses (1 = in-service only)
- flag_plant = 4 #all machines
- flag_load = 4 #4 = all loads (1 = in-service only)
- flag_motor = 4 #4 = all motors (1 = in-service only)
- flag_swsh = 4 #4 = all switched shunts (1 = in-service only)
- flag_brflow = 1 # in-service
- owner_brflow = 1 # bus, ignored if sid is -ve
- ties_brflow = 5 # ignored if sid is -ve
- entry = 1 # gives a single entry (each branch once)
-
- #Bus data (number, basekV, pu, name, ...) : PSSe has 3 functions one for integer data, one for real data and one for strings
- istrings = ['number']
- ierr, idata = psspy.abusint(sid, flag_bus, istrings)
- buses=idata
-
- rstrings = ['base','pu']
- ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
- buses.append(rdata[0])
- buses.append(rdata[1])
-
- cstrings = ['name']
- ierr, cdata = psspy.abuschar(sid, flag_bus, cstrings)
- cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
- buses.append(cdata[0])
-
- rstrings = ['nvlmlo','nvlmhi']
- ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
- buses.append(rdata[0])
- buses.append(rdata[1])
-
- istrings = ['type']
- ierr, idata = psspy.abusint(sid, flag_bus, istrings)
- buses.append(idata[0])
-
- rstrings = ['angled']
- ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
- buses.append(rdata[0])
-
- buses=zip(*buses) # transpose the matrix
-
- del idata, rdata, istrings, rstrings
-
- #Lines data (from, to, amps, rate%a, ploss, qloss)
- flag=2 #All non-transformer branches
- istrings = ['fromnumber','tonumber']
- ierr, idata = psspy.abrnint(sid, owner_brflow, ties_brflow, flag, entry, istrings)
- lines=idata
-
- rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
- ierr, rdata = psspy.abrnreal(sid, owner_brflow, ties_brflow, flag, entry, rstrings)
- for rc in range (np.matrix(rdata).shape[0]) :
- lines.append(rdata[rc])
-
- cstrings=['fromname','toname','id']
- ierr, cdata = psspy.abrnchar(sid, owner_brflow, ties_brflow, flag, entry, cstrings)
- for rc in range (np.matrix(cdata).shape[0]) :
- cdata[rc]=map( lambda s: s.replace("\n"," "),cdata[rc])
- lines.append(cdata[rc])
-
- #eliminate breakers and switches
- linesAll=zip(*lines) # transpose the matrix
- lines = []
- for line in linesAll:
- if ('@' not in line[10]) and ('*' not in line[10]):
- lines.append(line)
-
- del idata, rdata, istrings, rstrings
-
- #2 windings transformers data (from, to, amps, rate%a, ploss, qloss)
- flag=6 #All transformer branches
- istrings = ['fromnumber','tonumber']
- ierr, idata = psspy.abrnint(sid, owner_brflow, ties_brflow, flag, entry, istrings)
- transf=idata
-
- rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
- ierr, rdata = psspy.abrnreal(sid, owner_brflow, ties_brflow, flag, entry, rstrings)
- for rc in range (np.matrix(rdata).shape[0]) :
- transf.append(rdata[rc])
-
- cstrings=['fromname','toname','id']
- ierr, cdata = psspy.abrnchar(sid, owner_brflow, ties_brflow, flag, entry, cstrings)
- for rc in range (np.matrix(cdata).shape[0]) :
- cdata[rc]=map( lambda s: s.replace("\n"," "),cdata[rc])
- transf.append(cdata[rc])
-
- transf=zip(*transf) # transpose the matrix
-
- del idata, rdata, istrings, rstrings
-
- #3 windings transformers data (from, to, amps, rate%a, ploss, qloss)
- #sid = -1 #assume a subsystem containing all buses in working case
- owner_3flow = 1 #1 = use bus ownership 2 = use tfo ownership
- ties_3flow = 3 #ignored bc sid is negative. 3 = interior subsystem and subsystem tie 3 winding transformers
- flag=3 #all 3 winding transfo windings
- entry = 2 #1=winding 1 bus order, 2=transformer name order
-
- istrings = ['wind1number','wind2number','wind3number', 'wndnum']
- ierr, idata = psspy.awndint(sid, owner_3flow, ties_3flow, flag, entry, istrings)
- transf3 = idata
-
- rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
- ierr, rdata = psspy.awndreal(sid, owner_3flow, ties_3flow, flag, entry, rstrings)
- for rc in range (np.matrix(rdata).shape[0]) :
- transf3.append(rdata[rc])
-
- cstrings=['wind1name','wind2name','wind3name','id']
- ierr, cdata = psspy.awndchar(sid, owner_3flow, ties_3flow, flag, entry, cstrings)
- for rc in range (np.matrix(cdata).shape[0]) :
- cdata[rc]=map( lambda s: s.replace("\n"," "),cdata[rc])
- transf3.append(cdata[rc])
-
- transf3=zip(*transf3) # transpose the matrix
-
- del idata, rdata, istrings, rstrings
-
-
- #Machines data (bus, inservice, number, pgen, qgen, mvabase, pmax, qmax, name)
- istrings = ['number','status']
- ierr, idata = psspy.amachint(sid, flag_plant, istrings)
- plants=idata
-
- cstrings = ['id']
- ierr, cdata = psspy.amachchar(sid, flag_plant, cstrings)
- for rc in range (np.matrix(cdata).shape[0]) :
- plants.append(cdata[rc])
-
- rstrings = ['pgen','qgen','mbase','pmax','qmax']
- ierr, rdata = psspy.amachreal(sid, flag_plant, rstrings)
- for rc in range (np.matrix(rdata).shape[0]) :
- plants.append(rdata[rc])
-
- cstrings = ['name']
- ierr, cdata = psspy.amachchar(sid, flag_plant, cstrings)
- cdata[0]= map( lambda s: s.replace("\n"," "),cdata[0])
- plants.append(cdata[0])
-
- rstrings = ['pmin','qmin']
- ierr, rdata = psspy.amachreal(sid, flag_plant, rstrings)
- for rc in range (np.matrix(rdata).shape[0]) :
- plants.append(rdata[rc])
-
- istrings = ['wmod']
- ierr, idata = psspy.amachint(sid, flag_plant, istrings)
- for rc in range (np.matrix(idata).shape[0]) :
- plants.append(idata[rc])
-
- nb_plants=np.matrix(plants).shape[1]
- for rc in range (0,nb_plants) :
- plants[3][rc]=float(plants[3][rc]*int(plants[1][rc])) # If the plant isn't in service its production is fixed to zero
- plants[4][rc]=float(plants[4][rc]*int(plants[1][rc])) # If the plant isn't in service its production is fixed to zero
-
- plants=zip(*plants) # transpose the matrix
-
- del idata, rdata, cdata
-
- #Loads data (bus, active, reactive, status, name, id)
- istrings = ['number']
- ierr, idata = psspy.aloadint(sid, flag_load, istrings)
- loads=idata
-
- xstrings = ['mvaact']
- ierr, xdata = psspy.aloadcplx(sid, flag_load, xstrings)
- loads.append(np.real(xdata)[0]) # Append the real part of the load
- loads.append(np.imag(xdata)[0]) #Append the imaginary part of the load
-
- istrings = ['status']
- ierr, idata = psspy.aloadint(sid, flag_load, istrings)
- loads.append(idata[0])
-
- cstrings = ['name', 'id']
- ierr, cdata = psspy.aloadchar(sid, flag_load, cstrings)
- cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
- loads.append(cdata[0])
- loads.append(cdata[1])
-
- nb_loads=np.matrix(loads).shape[1]
- for rc in range (0,nb_loads) :
- loads[1][rc]=float(loads[1][rc]*int(loads[3][rc])) # If the load isn't in service its consumption is fixed to zero
- loads[2][rc]=float(loads[2][rc]*int(loads[3][rc])) # If the load isn't in service its consumption is fixed to zero
-
- loads=zip(*loads) # transpose the matrix
-
- del idata, cdata, xdata
-
- #Fixed shunt data (number, MVAR, name, ...)
- istrings = ['number','status']
- ierr, idata = psspy.afxshuntint(sid, flag_bus, istrings)
- shunt=idata
-
- xstrings = ['shuntact']
- ierr, xdata = psspy.afxshuntcplx(sid, flag_bus, xstrings)
- shunt.append(np.imag(xdata)[0]) #Append the imaginary part of the shunt
-
- cstrings = ['name']
- ierr, cdata = psspy.afxshuntchar(sid, flag_bus, cstrings)
- cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
- shunt.append(cdata[0])
-
- xstrings = ['shuntnom']
- ierr, xdata = psspy.afxshuntcplx(sid, flag_bus, xstrings)
- shunt.append(np.imag(xdata)[0]) #Append the imaginary part of the nominal shunt
-
- cstrings = ['id']
- ierr, cdata = psspy.afxshuntchar(sid, flag_bus, cstrings)
- cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
- shunt.append(cdata[0])
-
- nb_sh=np.matrix(shunt).shape[1]
- for rc in range (0,nb_sh) : # If the fixed shunt isn't in service its MVAR is fixed to zero
- shunt[2][rc]=float(shunt[2][rc]*int(shunt[1][rc]))
- shunt[4][rc]=float(shunt[4][rc]*int(shunt[1][rc]))
-
- shunt=zip(*shunt) # transpose the matrix
-
- del idata, cdata, xdata
-
- #Switched shunt data (number, MVAR, name, ...)
- istrings = ['number','status']
- ierr, idata = psspy.aswshint(sid, flag_swsh, istrings)
- swshunt=idata
- status = np.array(swshunt[1])
-
-
- rstrings = ['bswact']
- ierr, rdata = psspy.aswshreal(sid, flag_swsh, rstrings)
- swshunt.append(rdata[0]) #Append the actual switched shunt value (Mvar)
-
- cstrings = ['name']
- ierr, cdata = psspy.aswshchar(sid, flag_swsh, cstrings)
- #cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
- swshunt.append(cdata[0])
-
- rstrings = ['bswnom']
- ierr, rdata = psspy.aswshreal(sid, flag_swsh, rstrings)
- swshunt.append(rdata[0]) #Append the nominal switched shunt value (Mvar)
-
- nb_swsh=np.matrix(swshunt).shape[1]
- for rc in range (0,nb_swsh) : # If the swshunt isn't in service its MVAR is fixed to zero
- swshunt[2][rc]=float(swshunt[2][rc]*int(swshunt[1][rc]))
- swshunt[4][rc]=float(swshunt[4][rc]*int(swshunt[1][rc]))
-
- swshunt=zip(*swshunt) # transpose the matrix
-
- del idata, cdata, rdata
-
- #Motors data (bus, active, reactive, status, name, id)
- istrings = ['number']
- ierr, idata = psspy.aindmacint(sid, flag_motor, istrings)
- motors=idata
-
- rstrings = ['p','q']
- ierr, rdata = psspy.aindmacreal(sid, flag_motor, rstrings)
- motors.append(rdata[0]) #Append the real part of the motor load
- motors.append(rdata[1]) #Append the imaginary part of the motor load
-
- istrings = ['status']
- ierr, idata = psspy.aindmacint(sid, flag_motor, istrings)
- motors.append(idata[0])
-
- cstrings = ['name', 'id']
- ierr, cdata = psspy.aindmacchar(sid, flag_motor, cstrings)
- cdata[0]=map( lambda s: s.replace("\n"," "),cdata[0])
- motors.append(cdata[0])
- motors.append(cdata[1])
-
- nb_motors=np.matrix(motors).shape[1]
- for rc in range (0,nb_motors) :
- motors[1][rc]=float(motors[1][rc]*int(motors[3][rc])) # If the load isn't in service its consumption is fixed to zero
- motors[2][rc]=float(motors[2][rc]*int(motors[3][rc])) # If the load isn't in service its consumption is fixed to zero
-
- motors=zip(*motors) # transpose the matrix
-
- del idata, cdata, rdata
-
- return buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt
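-
-## Hedged usage sketch (hypothetical .sav path; requires an initialized PSS/E session):
-## buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt = read_sav(r'C:\PSEN\Example\BaseCase.sav')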
-
-def MyLogger(x,y,z,logCSVfilename,ite):
- f=open(logCSVfilename, 'a')
- f.write(str(ite)+';')
- f.write(";")
- nx = len(x)
- for i in range(0,nx):
- f.write(str(x[i]))#f.write("%f;" % (x[i]))
- f.write(";")
- f.write(";")
- nz = len(z)
- for i in range(0,nz):
- try:
- f.write("%f;" % (z[i]))
- except:
- f.write(str(z[i])+";")
- f.write(";")
- ny = len(y)
- for j in range(0,ny):
- f.write("%f;" % (y[j]))
- f.write("\n")
- f.close()
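-
-## Hedged usage sketch (illustrative values): appends one semicolon-separated line
-## containing the iteration number, the inputs x, the extra outputs z, then the outputs y
-## MyLogger([0.8, 1.0], [395.2, 12.1], [50.3], 'simulationDClog.csv', 1)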
-
-# Function to write one csv output file for each type of output quantity
-def MyMultiLogger (x, y, sizeY, z, ite, folder, day, fich, hour):
- global ny
- y0=0
- for fich in range (np.size(sizeY,0)):
- multilogfilename=folder+"\N"+day+"\Y"+str(fich)+"simulationDClog_"+hour+".csv"
- f=open(multilogfilename, 'a')
- f.write("%f;" % (ite))
- f.write(";")
- nx = len(x)
- for i in range(0,nx):
- f.write("%f;" % (x[i]))
- f.write(";")
- nz = len(z)
- for i in range(0,nz):
- f.write("%f;" % (z[i]))
- f.write(";")
- ny = sizeY[fich]
- for j in range(0,ny):
- f.write("%f;" % (y[j+y0]))
- f.write("\n")
- f.close()
- y0 += ny
- print( "Fichiers "+str(ite)+" enregistres\n\n")
-
-# Graphical analyses
-def graphical_out (inputSample, outputSampleAll, inputDim, outputDim, montecarlosize) :
- print "\n\n\n Writing graphical analysis files..."
- # A Pairwise scatter plot of the inputs
- myGraph = Graph()
- myPairs = Pairs(inputSample, 'Inputs relations', inputSample.getDescription(), "red", "bullet")
- myGraph.add(Drawable(myPairs))
- myGraph.draw("Input Samples",640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print 'Input pairwise scatterplot done...'
-
- # A Pairwise scatter plot of the outputs
- myGraph = Graph()
- myPairs = Pairs(outputSampleAll, 'Output relations', outputSampleAll.getDescription(), "red", "bullet")
- myGraph.add(Drawable(myPairs))
- myGraph.draw("Output Samples",640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print 'Output pairwise scatterplot done...'
-
- # A Pairwise scatter plot of the inputs/outputs
- # Draw all scatter plots yj vs xi
- for j in range(outputDim):
- outputSamplej=outputSampleAll.getMarginal(j)
- Ylabelstr=outputSamplej.getDescription()[0]
- for i in range(inputDim):
- inputSamplei=inputSample.getMarginal(i)
- Xlabelstr=inputSamplei.getDescription()[0]
- X=NumericalSample(montecarlosize,2)
- for k in range(montecarlosize):
- X[k,0]=inputSamplei[k][0]
- X[k,1]=outputSamplej[k][0]
- myGraph = Graph()
- myCloud=Cloud(X);
- mytitle=Ylabelstr+"vs"+Xlabelstr
- myGraph.add(Drawable(myCloud))
- myGraph.setAxes(1)
- myGraph.setXTitle(Xlabelstr)
- myGraph.setYTitle(Ylabelstr)
- myGraph.draw(mytitle,640,480,GraphImplementation.PDF)
- #ViewImage(myGraph.getBitmap())
- print 'Input/Output pairwise scatterplot done...'
-
- # A histogram of the inputs
- for i in range(inputDim):
- inputSamplei=inputSample.getMarginal(i)
- myGraph = VisualTest.DrawHistogram(inputSamplei)
- labelarray=inputSamplei.getDescription()
- labelstr=labelarray[0]
- myGraph.setTitle(labelstr)
- myGraph.setName(labelstr)
- myGraph.setXTitle(labelstr)
- myGraph.setYTitle("Frequency")
- myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print 'Input histogram done...'
-
- # A histogram of the outputs
- for j in range(outputDim):
- outputSamplej=outputSampleAll.getMarginal(j)
- myGraph = VisualTest.DrawHistogram(outputSamplej)
- labelarray=outputSamplej.getDescription()
- labelstr=labelarray[0]
- myGraph.setTitle(labelstr)
- myGraph.setName(labelstr)
- myGraph.setXTitle(labelstr)
- myGraph.setYTitle("Frequency")
- myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print 'Output histogram done'
- print 'Graphical output terminated'
-
-
-def config_contingency(LinesList,GroupsList,TransformersList,LoadsList,MotorsList) :
-
- lines_con=[]
- groups_con=[]
- loads_con = []
- transfos_con = []
- motors_con = []
- sizeLines = len(LinesList)
- sizeGroups = len(GroupsList)
- sizeTransfos = len(TransformersList)
- sizeLoads = len(LoadsList)
- sizeMotors = len(MotorsList)
- val=[]
- prob=[]
-
- for i in range(sizeLines+sizeGroups+sizeTransfos + sizeLoads + sizeMotors) :
- val.append(int(i))
- for i in range (sizeLines) :
- lines_con.append(LinesList[i][0])
- prob.append(LinesList[i][1])
- for i in range (sizeGroups) :
- prob.append(GroupsList[i][1])
- groups_con.append(GroupsList[i][0])
- for i in range (sizeTransfos) :
- prob.append(TransformersList[i][1])
- transfos_con.append(TransformersList[i][0])
- for i in range (sizeLoads) :
- prob.append(LoadsList[i][1])
- loads_con.append(LoadsList[i][0])
- for i in range (sizeMotors) :
- prob.append(MotorsList[i][1])
- motors_con.append(MotorsList[i][0])
-
- return lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob
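-
-## Hedged usage sketch (illustrative [name, probability] pairs, not from the original study):
-## lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob = \
-## config_contingency([['LINE_1', 0.01]], [['GEN_A', 0.05]], [], [], [])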
-
-##def config_contingency(LinesPath,GeneratorsPath,TransformersPath,LoadsPath) :
-##
-## lines_con=[]
-## groups_con=[]
-## loads_con = []
-## transfos_con = []
-##
-## # Loading of lines contingency configuration
-## if LinesPath != '':
-## f=open(LinesPath,"r")
-## lines=f.readlines()
-## f.close()
-## for i in range (len(lines)) :
-## line=lines[i].split(";")
-## try :
-## int(line[1])
-## except ValueError :
-## pass
-## else :
-## if line[0] == '' :
-## line[0] = '0'
-## lines_con.append([int(line[1]), int(line[3]), str(line[5]),float(line[0].replace(',','.'))])
-##
-## # Loading of lines contingency configuration
-## if TransformersPath != '':
-## f=open(TransformersPath,"r")
-## lines=f.readlines()
-## f.close()
-## for i in range (len(lines)) :
-## line=lines[i].split(";")
-## try :
-## int(line[1])
-## except ValueError :
-## pass
-## else :
-## if line[0] == '' :
-## line[0] = '0'
-## transfos_con.append([int(line[1]), int(line[3]), str(line[5]),float(line[0].replace(',','.'))])
-##
-## # Loading of groups contingency configuration
-## if GeneratorsPath != '':
-## f=open(GeneratorsPath,"r")
-## lines=f.readlines()
-## f.close()
-## for i in range (len(lines)) :
-## line=lines[i].split(";")
-## try :
-## int(line[1])
-## except ValueError :
-## pass
-## else :
-## if line[0] == '' :
-## line[0] = '0'
-## groups_con.append([int(line[1]), int(line[3]),float(line[0].replace(',','.'))])
-##
-## # Loading of loads contingency configuration
-## if LoadsPath != '':
-## f=open(LoadsPath,"r")
-## lines=f.readlines()
-## f.close()
-## for i in range (len(lines)) :
-## line=lines[i].split(";")
-## try :
-## int(line[1])
-## except ValueError :
-## pass
-## else :
-## if line[0] == '' :
-## line[0] = '0'
-## loads_con.append([int(line[1]), int(line[3]), float(line[0].replace(',','.'))])
-##
-## sizeLines = len(lines_con)
-## sizeGroups = len(groups_con)
-## sizeTransfos = len(transfos_con)
-## sizeLoads = len(loads_con)
-## val=[]
-## prob=[]
-## for i in range(sizeLines+sizeGroups+sizeTransfos + sizeLoads) :
-## val.append(int(i))
-##
-## for i in range (sizeLines) :
-## prob.append(lines_con[i][3])
-## for i in range (sizeGroups) :
-## prob.append(groups_con[i][2])
-## for i in range (sizeTransfos) :
-## prob.append(transfos_con[i][3])
-## for i in range (sizeLoads) :
-## prob.append(loads_con[i][2])
-## return lines_con, groups_con, transfos_con, loads_con, val, prob
-
-def LoadARMA(time_serie_file, time_serie_SS, time_serie_TH) :
- f=open(time_serie_file,"r")
- lines=f.readlines()
- N=len(lines)
- Xt=[]
- for i in range(N) :
- Xt.append([float(lines[i])])
-
- myTG=RegularGrid(0,float(time_serie_SS),N)
- TS=TimeSeries(myTG,NumericalSample(Xt))
- myWN=WhiteNoise(Distribution(Normal(0,1)),myTG)
- myState=ARMAState(TS.getSample(),NumericalSample())
- p=12
- q=0
- d=1
- myFactory = ARMALikelihoodFactory ( p , q , d )
- myARMA = myFactory.build(TS)
-
- myARMA.setState(myState)
-
- AR = myARMA.getARCoefficients()
- MA = myARMA.getMACoefficients()
-
- ts = myARMA.getRealization()
- ts.setName('A realization')
- myTSGraph=ts.drawMarginal(0)
- myTSGraph.draw('Realization'+str(p)+","+str(q),640,480,GraphImplementation.PDF)
- myARMAState=myARMA.getState()
-
- #Make a prediction of the future on next Nit instants
- Nit = int(time_serie_TH)
- myARMA2=ARMA(AR,MA,myWN,myARMAState)
- possibleFuture=myARMA2.getFuture(Nit)
- possibleFuture.setName('Possible future')
-
- Xt2=[]
- for i in range (len(possibleFuture)):
- Xt2.append(possibleFuture.getValueAtIndex(i)[0])
- Max=float(max(Xt2))
- Min=float(min(Xt2))
- h=float(Max-Min)
- for i in range (len(possibleFuture)):
- value= (Xt2[i]-Min+h/3)/(Max-Min+h/3)
- possibleFuture.setValueAtIndex(i,NumericalPoint(1,value))
-
- myFG=possibleFuture.drawMarginal(0)
- myFG.draw('Future'+str(Nit),640,480,GraphImplementation.PDF)
-
- return possibleFuture
-
-def LoadTS(time_serie_file) :
- TS=[]
- for i in range(len(time_serie_file)) :
- if time_serie_file[i] == -1 :
- pass
- else :
- f=open(time_serie_file[i],"r")
- lines=f.readlines()
- N=len(lines)
- Xt=[]
- for j in range(N) :
- try :
- float(lines[j])
- except ValueError :
- lines[j] = commaToPoint(lines[j])
- else :
- pass
- Xt.append([float(lines[j])])
- TS.append(Xt)
- return TS
-
-
-def KSDist(lines) :
- print "Creating Kernel Smoothing distribution "
- N=len(lines)
- Xt=[]
- for i in range(N) :
- if lines[i] == "\n" :
- print "End of file"
- break
- else :
- try :
- float(lines[i])
- except ValueError :
- lines[i] = commaToPoint(lines[i])
- else :
- pass
- Xt.append([float(lines[i])])
- NS=NumericalSample(Xt)
- kernel=KernelSmoothing(Uniform())
- myBandwith = kernel.computeSilvermanBandwidth(NS)
- KS=kernel.build(NS,myBandwith,1)
- return KS
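-
-## Hedged usage sketch (hypothetical history file with one value per line):
-## g = open(r'C:\PSEN\Example\load_history.csv', 'r')
-## law = KSDist(g.readlines())
-## g.close()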
-
-
-def threshold (inputRandomVector, outputVariableOfInterest,pssefun,inputDistribution) :
- # We create a quadraticCumul algorithm
- myQuadraticCumul = QuadraticCumul(outputVariableOfInterest)
-
- # We compute the several quantities provided by the quadratic cumul algorithm
- # and count the number of evaluation calls needed
- nbBefr = pssefun.getEvaluationCallsNumber()
-
- # Mean first order
- meanFirstOrder = myQuadraticCumul.getMeanFirstOrder()[0]
- nbAfter1 = pssefun.getEvaluationCallsNumber()
-
- # Mean second order
- meanSecondOrder = myQuadraticCumul.getMeanSecondOrder()[0]
- nbAfter2 = pssefun.getEvaluationCallsNumber()
-
- # Standard deviation
- stdDeviation = sqrt(myQuadraticCumul.getCovariance()[0,0])
- nbAfter3 = pssefun.getEvaluationCallsNumber()
-
- print "First order mean=", myQuadraticCumul.getMeanFirstOrder()[0]
- print "Evaluation calls number = ", nbAfter1 - nbBefr
- print "Second order mean=", myQuadraticCumul.getMeanSecondOrder()[0]
- print "Evaluation calls number = ", nbAfter2 - nbAfter1
- print "Standard deviation=", sqrt(myQuadraticCumul.getCovariance()[0,0])
- print "Evaluation calls number = ", nbAfter3 - nbAfter2
-
- print "Importance factors="
- for i in range(inputRandomVector.getDimension()) :
- print inputDistribution.getDescription()[i], " = ", myQuadraticCumul.getImportanceFactors()[i]
- print ""
-
-def getUserDefined (values):
- val = []
- prob = []
- for a in values:
- val.append(a[0])
- prob.append(a[1])
- dim = len (val)
-
- prob = map(float,prob)
- prob = [p/sum(prob) for p in prob]
-
-## weights = NumericalPoint(prob)
-## Vals = []
-## for i in range(dim):
-## Vals.append([float(val[i]),float(val[i])+0.000001])
-## ranges = NumericalSample(Vals)
-## return UserDefined(ranges, weights)
- coll = UserDefinedPairCollection()
- for i in range (dim) :
- UDpair=UserDefinedPair(NumericalPoint(1,float(val[i])),float(prob[i]))
- coll.add(UDpair)
- return UserDefined(coll)
-
-
-def getHistogram (values) :
- step = []
- prob = []
- for a in values:
- step.append(a[0])
- prob.append(a[1])
- dim = len (step)
- myHistogram = HistogramPairCollection(dim)
- for i in range (dim) :
- try:
- myHistogram[i]=HistogramPair(float(step[i]),float(prob[i]))
- except:
- pass
- return myHistogram
-
-
-
-def getUserLaw(LawDico):
- time_serie = 0
- time_serie_file = ''
- time_serie_SS = 0
- time_serie_TH = 0
- if LawDico['Law']=="Normal":
- law = Normal(float(LawDico['Mu']),float(LawDico['Sigma']))#Openturns
- elif LawDico['Law']=="Uniform":
- law=Uniform(float(LawDico['A']),float(LawDico['B']))
- elif LawDico['Law']=="Exponential":
- law=Exponential(float(LawDico['Lambda']),float(LawDico['Gamma']))
- elif LawDico['Law']=="Weibull":
- if LawDico['Settings']=='AlphaBeta':
- law=Weibull(float(LawDico['Alpha']),float(LawDico['Beta']),float(LawDico['Gamma']))
- elif LawDico['Settings']=='MuSigma':
- law=Weibull(float(LawDico['Mu']),float(LawDico['Sigma']),float(LawDico['Gamma']),Weibull.MUSIGMA)
- elif LawDico['Law']=="TruncatedNormal":
- law=TruncatedNormal(float(LawDico['MuN']),float(LawDico['SigmaN']),float(LawDico['A']),float(LawDico['B']))
- elif LawDico['Law']=="UserDefined":
- law=UserDefined(getUserDefined (LawDico['Values']))
- elif LawDico['Law']=="Histogram":
- law=Histogram(LawDico['First'], getHistogram (LawDico['Values']))
- elif LawDico['Law']=="PDF_from_file":
- law=KSDist(LawDico['FileContents'])
- elif LawDico['Law']=="TimeSeries_from_file":
- law = Uniform(0.999999,1)
- time_serie=1
- time_serie_file=LawDico['FileContents']
- else :
- law = Uniform(0.999999,1)
- return law, [time_serie, time_serie_file] #[time_serie, time_serie_file, time_serie_SS, time_serie_TH]
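-
-## Hedged usage sketch (illustrative parameters, not from the original study):
-## LawDico = {'Law':'Weibull', 'Settings':'AlphaBeta', 'Alpha':2.0, 'Beta':8.0, 'Gamma':0.0}
-## law, [time_serie, time_serie_file] = getUserLaw(LawDico)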
-
-
-
-
-def contingency_automatic (dfxPath, acccPath, rate) :
- psspy.accc_with_dsp_3( 0.5,[0,0,0,1,1,2,0,0,0,0,0],r"""ALL""",dfxPath,acccPath,"","","")
- psspy.accc_single_run_report_4([1,int(rate),int(rate),1,1,0,1,0,0,0,0,0],[0,0,0,0,6000],[ 0.5, 5.0, 100.0,0.0,0.0,0.0, 99999.],acccPath)
-
- rslt_summary=pssarrays.accc_summary(acccPath)
- if int(rate) == 1 :
- rate = rslt_summary.rating.a
- elif int(rate) == 2 :
- rate = rslt_summary.rating.b
- elif int(rate) == 3 :
- rate = rslt_summary.rating.c
- else :
- print "NO RATE CHOOSEN"
-
- Labels=rlst.colabel
- contin_load=[]
- for label in Labels :
- t=[]
- rslt=pssarrays.accc_solution(acccPath,contingency,label,0.5,5.0)
- ampFlow=rslt.ampflow
- for i in range (len(rA)) :
- t.append(ampFlow[i]/rate[i])
- contin_load.append(t)
- return contin_load
-
-def commaToPoint (string) :
- stringReplaced = string.replace(',','.')
- return stringReplaced
-
-def PSSEFunct(dico,x):
- if 1:
- #try:
-## if dico['TStest']==1:
-## os.chdir(dico['doc_base']) #to work in right directory of the package
-## sys.stdout=open('process num'+str(os.getpid())+'_package '+\
-## str(dico['num_pac'])+'.out','w')
-
- #Get all the dico values
- TStest=dico['TStest']
- sizeY0=dico['sizeY0']
- sizeY1=dico['sizeY1']
- sizeY2=dico['sizeY2']
- sizeY3=dico['sizeY3']
- sizeY4=dico['sizeY4']
- sizeY5=dico['sizeY5']
- sizeY6=dico['sizeY6']
- sizeY7=dico['sizeY7']
- sizeY8=dico['sizeY8']
- sizeY=dico['sizeY']
- Xt=dico['Xt']
- folder=dico['folder']
- folderN_1=dico['folderN_1']
- day=dico['day']
- doc_base=dico['doc_base']
- PSSEParams=dico['PSSEParams']
- _i=dico['_i']
- _f=dico['_f']
- _s=dico['_s']
- if dico['PSSEParams']['I_MAX']=='RateA':
- Irate_num=1
- elif dico['PSSEParams']['I_MAX']=='RateB':
- Irate_num=2
- elif dico['PSSEParams']['I_MAX']=='RateC':
- Irate_num=3
- num_pac=dico['num_pac']
- logCSVfilename=dico['logCSVfilename']
- continLines=dico['continLines']
- continGroups=dico['continGroups']
- continTransfos=dico['continTransfos']
- continLoads=dico['continLoads']
- continMotors=dico['continMotors']
- continVal=dico['continVal']
- continProb=dico['continProb']
- position=dico['position']
- timeVect=dico['timeVect']
- LawsList = dico['CorrMatrix']['laws']
- all_inputs_init = dico['all_inputs_init']
- AdjLoadTables = dico['AdjLoadTables']
-
-
- #initializations
- Output=[]
- LS=[]
- FS=[]
- Pmachine=[]
- LStable=[]
- FStable=[]
-
- LS_beforeUC=[]
- FS_beforeUC=[]
- Pmachine_beforeUC=[]
- LStable_beforeUC=[]
- FStable_beforeUC=[]
- Output_beforeUC = []
-
- outputSampleAll=NumericalSample(0,9)
- inputSample=[]
- redirect.psse2py()
- #import pssdb
- psspy.psseinit(80000)
-
- # Silent execution of PSSe
- islct=6 # 6=no output; 1=standard
- psspy.progress_output(islct)
-
-
- x_copy = []
- for ite in range(len(x)):
- xite = []
- for j in range(len(x[ite])):
- xite.append(x[ite][j])
- x_copy.append(xite)
-
-
- for ite in range(len(x)):
-
- position+=1
- os.chdir(doc_base) #to work in right directory of the package
- # Load data from PSSe
- psspy.case(doc_base+'/BaseCase.sav') #Launching of PSSE and opening the working file
- all_inputs_base=read_sav(doc_base+'/BaseCase.sav')
- buses_base=all_inputs_base[0]
- lines_base=all_inputs_base[1]
- transf_base=all_inputs_base[2]
- plants_base=all_inputs_base[3]
- loads_base=all_inputs_base[4]
- shunt_base=all_inputs_base[5]
- motors_base=all_inputs_base[6]
- transf3_base=all_inputs_base[7]
- swshunt_base=all_inputs_base[8]
- #Calculate Losses:
- P_load = 0
- for load in loads_base:
- P_load += load[1]
- for motor in motors_base:
- P_load+= motor[1]
- P_gen = 0
- for gen in plants_base:
- busnum = gen[0]
- genid = gen[2].strip()
- pgen = gen[3]
- P_gen+=pgen
- Losses = P_gen - P_load
- LossesRatio = (Losses/P_load)*1.25 #overestimate losses to avoid surpassing swing bus capacity after economic dispatch
- doci=os.path.join(doc_base,"Case_"+str(position)+".sav")
- doci_beforeUC = os.path.join(doc_base,"Case_beforeUC_" + str(position) + ".sav")
- psspy.save(doci)
- # Total initial (fixed) shunt on buses
- init_shunt = 0
- for i in range(len(shunt_base)) :
- init_shunt += float(shunt_base[i][2])
- # Configure the OPF from the user parameters
- TapChange = 1-int(dico['PSSEParams']['LOCK_TAPS']) #0 if locked, 1 if stepping
- psspy.report_output(6,"",[0,0]) #6=no output
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'Got to OPF parametrization'
- logfile = os.path.join(doc_base,r"""DETAIL""")
- psspy.produce_opf_log_file(1,logfile)
- psspy.opf_fix_tap_ratios(1-TapChange) #0 : do not fix transformer tap ratios
- psspy.minimize_fuel_cost(int(dico['PSSEParams']['FUEL_COST']))
- psspy.minimize_adj_bus_shunts(int(dico['PSSEParams']['MVAR_COST']))
- psspy.minimize_load_adjustments(int(dico['PSSEParams']['LOADSHEDDING_COST']))
- #psspy.minimize_load_adjustments(False) #block load adjustments during application of laws
- #psspy.initial_opf_barrier_coeff(100)
- #psspy.final_opf_barrier_coeff(0.0001)
- #psspy.opf_step_length_tolerance(0.00001)
- #psspy.opf_fix_all_generators(0)
- psspy.set_opf_report_subsystem(3,1)
- psspy.solution_parameters_4([PSSEParams['ITERATION_LIMIT'],PSSEParams['ITERATION_LIMIT'],PSSEParams['ITERATION_LIMIT'],_i,_i], [_f]*19)
- #[1.6, 1.6, 1, 0.0001, 1, 1, 1, 0.00001, 5, 0.7, 0.0001, 0.005, 1, 0.05, 0.99, 0.99, 1, 0.0001, 100])
-
- else: #economic dispatch
- ecd_file = PSSEParams['ecd_file']
- # 1. Assign the sampled values
- nx = len(x[0])
- if TStest==1 :
- for i,law in enumerate(LawsList):
- if Xt[ite][i] == -1 :
- if law != 'N_1_fromFile':
- if 'Availability' in dico['Laws'][law]['Type']:
- status = int(round(x[ite][i])) #ideally a value between 0 and 1, inclusive, was drawn
- status = min(status,1) #force status to be either 0 or 1
- status = max(status,0)
- x_copy[ite][i]=status
- if dico['Laws'][law]['ComponentType']=='Generator' and 'Level' in dico['Laws'][law]['Type']:
- if dico['Laws'][law]['TransferFunction']==True:
- if dico['Laws'][law]['TF_Input']=='.pow file':
- z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
- pathWT = dico['Laws'][law]['File_Name']
- HH = dico['Laws'][law]['Hub_Height']
- alpha = dico['Laws'][law]['AlphaWS']
- PercentLoss = dico['Laws'][law]['Percent_Losses']
- x_copy[ite][i]=eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
- elif dico['Laws'][law]['TF_Input']=='tuples list':
- x_copy[ite][i]=applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
- else: #ensure values are between 0 and 1
- Pval = x[ite][i]
- Pval = min(Pval,1)
- Pval = max(Pval,0)
- x_copy[ite][i]=Pval
- else: #law=='N_1_fromFile'
- x_copy[ite][i]=int(floor(x[ite][i]))
-
- else:
- x_copy[ite][i]=float(Xt[ite][i]) # For a time-series study, give it the value of Xt
-
- else :
- for i,law in enumerate(LawsList):
- if law != 'N_1_fromFile':
- if 'Availability' in dico['Laws'][law]['Type']:
- status = int(round(x[ite][i])) #ideally a value between 0 and 1, inclusive, was drawn
- status = min(status,1) #force status to be either 0 or 1
- status = max(status,0)
- x_copy[ite][i]=status
- if dico['Laws'][law]['ComponentType']=='Generator' and 'Level' in dico['Laws'][law]['Type']:
- if dico['Laws'][law]['TransferFunction']==True:
- if dico['Laws'][law]['TF_Input']=='.pow file':
- z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
- pathWT = dico['Laws'][law]['File_Name']
- HH = dico['Laws'][law]['Hub_Height']
- alpha = dico['Laws'][law]['AlphaWS']
- PercentLoss = dico['Laws'][law]['Percent_Losses']
- x_copy[ite][i]=eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
- #x_copy[ite][i]=x[ite][i]
- elif dico['Laws'][law]['TF_Input']=='tuples list':
- x_copy[ite][i]=applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
- else: #ensure values are between 0 and 1
- Pval = x[ite][i]
- Pval = min(Pval,1)
- Pval = max(Pval,0)
- x_copy[ite][i]=Pval
- else: #law=='N_1_fromFile'
- x_copy[ite][i]=int(floor(x[ite][i]))
- inputSample.append(np.array(x[ite]))
-
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- #get OPF data
- allbus=1
- include = [1,1,1,1] #isolated buses, out of service branches, subsystem data, subsystem tie lines
- out = 0 #out to file, not window
- # if psspy.bsysisdef(0):
- # sid = 0
- # else: # Select subsytem with all buses
- # sid = -1
- sid = 3
- RopFile = os.path.join(dico['doc_base'],"BaseCase.rop" )
- AlreadyRop=os.path.isfile(RopFile)
- if not AlreadyRop:
- ierr = psspy.rwop(sid,allbus,include,out,RopFile) #write rop file
-
- GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(RopFile)
-
- if Debug:
- print "Starting application of laws"
-
- # 2. Run the calculation with PSSE
-
- #Editing some values in the PSSE .sav input file
- x2 = [] #list to store sampled values for logger function
- for i,law in enumerate(LawsList):
- if law != 'N_1_fromFile':
-
- #Reserve Constraint Law: change level of required reserve for a period reserve constraint
- if dico['Laws'][law]['ComponentType']=='Reserve Constraint':
- ReserveID = dico['Laws'][law]['ReserveID']
- ReserveFound = False
- ReserveActive=False
- for PRD in PeriodReserveData:
- if PRD[0] == ReserveID:
- ReserveFound=True
- ReserveActive=PRD[3]
- if not ReserveFound:
- print 'ALERT: ReserveID ', str(ReserveID), ' is not found. The user must define the period reserve in the .sav file before including a distribution on the reserve constraint in PSEN.'
- elif not ReserveActive:
- print 'ALERT: Spinning Reserve Correction entered in PSEN, but ReserveID ', str(ReserveID), ' is not activated in PSS/E.'
- else:
- status=_i #enabled/not enabled
- level=x_copy[ite][i] #MW
- timeframe = _f #minutes
- psspy.opf_perrsv_main(ReserveID,status,[level, timeframe]) #change reserve constraint level
- x2.append(x_copy[ite][i]) #store values for logger function
-
- # Load Law: change the values of the different loads and treat large changes of load to help convergence
- #if dico['Laws'][law]['ComponentType']=='Load' and ('N_1' not in law) and ('out' not in law.lower()):
- if dico['Laws'][law]['ComponentType']=='Load' and ('Availability' not in dico['Laws'][law]['Type']):
- LoadList = dico['Laws'][law]['Load']
- if x_copy[ite][i] > 0.75 : # Change the load(s) directly
- for LoadName in LoadList:
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_copy[ite][i]*P,x_copy[ite][i]*Q,_f,_f,_f,_f])
-
- elif x_copy[ite][i] > 0.4 : # Pre-process by stepping through an intermediate load level
- for LoadName in LoadList:
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[(1+x_copy[ite][i])/2*P,(1+x_copy[ite][i])/2*Q,_f,_f,_f,_f])
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'OPF load 1'
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
- else:
- if Debug:
- print "Economic Dispatch load 1"
- #economic dispatch
- EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- if Debug:
- print "Returned from EconomicDispatch"
- if np.any(np.array(EcdErrorCodes)!=0):
- print "Error in economic dispatch."
- for LoadName in LoadList : # Change all the loads
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_copy[ite][i]*P,x_copy[ite][i]*Q,_f,_f,_f,_f])
-
- else : # Pre-process by stepping through an intermediate load level
- for LoadName in LoadList:
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[0.7*P,0.7*Q,_f,_f,_f,_f])
-
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'OPF load 2a'
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
- else:
- if Debug:
- print "Economic Dispatch load 2"
- #economic dispatch
- EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- if np.any(np.array(EcdErrorCodes)!=0):
- print "Error in economic dispatch."
-
- for LoadName in LoadList : # Change all the loads
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[0.4*P,0.4*Q,_f,_f,_f,_f])
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'OPF load 2b'
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
- else:
- #economic dispatch
- EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- if np.any(np.array(EcdErrorCodes)!=0):
- print "Error in economic dispatch."
- if Debug:
- print "Economic Dispatch load 2"
- for LoadName in LoadList : # Change all the loads
- busNum = dico['Loads'][LoadName]['NUMBER']
- ID = dico['Loads'][LoadName]['ID']
- P = dico['Loads'][LoadName]['P']
- Q = dico['Loads'][LoadName]['Q']
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_copy[ite][i]*P,x_copy[ite][i]*Q,_f,_f,_f,_f])
- x2.append(x_copy[ite][i]) #store values sampled for logger function
- # Motor Load Law: change the values of the different induction motor loads and treat large changes of load to help convergence
- #if dico['Laws'][law]['ComponentType']=='Motor' and ('N_1' not in law) and ('out' not in law.lower()):
- if dico['Laws'][law]['ComponentType']=='Motor' and ('Availability' not in dico['Laws'][law]['Type']):
- MotorList = dico['Laws'][law]['Motor']
- if x_copy[ite][i] > 0.75 : # Change the motor load(s) directly
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Mbase = dico['Motors'][MotorName]['MBASE']
- BaseCode = dico['Motors'][MotorName]['BASECODE']
- Pinit = dico['Motors'][MotorName]['P']
- Qinit = dico['Motors'][MotorName]['Q']
- if BaseCode==2: #max is in MVA
- PF = Pinit/((Pinit**2+Qinit**2)**0.5)
- Pmax = PF*Mbase
- else:
- Pmax = Mbase
- I_list = [_i]*9
- F_list = [_f]*23
- F_list[2]=x_copy[ite][i]*Pmax
- psspy.induction_machine_chng(busNum,ID,I_list,F_list)
-
- elif x_copy[ite][i] > 0.4 : # Pre-process by stepping through an intermediate load level
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Mbase = dico['Motors'][MotorName]['MBASE']
- BaseCode = dico['Motors'][MotorName]['BASECODE']
- Pinit = dico['Motors'][MotorName]['P']
- Qinit = dico['Motors'][MotorName]['Q']
- if BaseCode==2: #max is in MVA
- PF = Pinit/((Pinit**2+Qinit**2)**0.5)
- Pmax = PF*Mbase
- else:
- Pmax = Mbase
- I_list = [_i]*9
- F_list = [_f]*23
- F_list[2]=x_copy[ite][i]*Pmax*0.7
- psspy.induction_machine_chng(busNum,ID,I_list,F_list)
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'OPF motor load 1'
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
- else:
- #economic dispatch
- EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- if np.any(np.array(EcdErrorCodes)!=0):
- print "Error in economic dispatch."
-
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Mbase = dico['Motors'][MotorName]['MBASE']
- BaseCode = dico['Motors'][MotorName]['BASECODE']
- Pinit = dico['Motors'][MotorName]['P']
- Qinit = dico['Motors'][MotorName]['Q']
- if BaseCode==2: #max is in MVA
- PF = Pinit/((Pinit**2+Qinit**2)**0.5)
- Pmax = PF*Mbase
- else:
- Pmax = Mbase
- I_list = [_i]*9
- F_list = [_f]*23
- F_list[2]=x_copy[ite][i]*Pmax
- psspy.induction_machine_chng(busNum,ID,I_list,F_list)
-
- else : # Pre-process by stepping through an intermediate load level
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Mbase = dico['Motors'][MotorName]['MBASE']
- BaseCode = dico['Motors'][MotorName]['BASECODE']
- Pinit = dico['Motors'][MotorName]['P']
- Qinit = dico['Motors'][MotorName]['Q']
- if BaseCode==2: #max is in MVA
- PF = Pinit/((Pinit**2+Qinit**2)**0.5)
- Pmax = PF*Mbase
- else:
- Pmax = Mbase
- I_list = [_i]*9
- F_list = [_f]*23
- F_list[2]=x_copy[ite][i]*Pmax*0.7
- psspy.induction_machine_chng(busNum,ID,I_list,F_list)
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'OPF motor load 2a'
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
- else:
- #economic dispatch
- EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- if np.any(np.array(EcdErrorCodes)!=0):
- print "Error in economic dispatch."
-
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Mbase = dico['Motors'][MotorName]['MBASE']
- BaseCode = dico['Motors'][MotorName]['BASECODE']
- Pinit = dico['Motors'][MotorName]['P']
- Qinit = dico['Motors'][MotorName]['Q']
- if BaseCode==2: #max is in MVA
- PF = Pinit/((Pinit**2+Qinit**2)**0.5)
- Pmax = PF*Mbase
- else:
- Pmax = Mbase
- I_list = [_i]*9
- F_list = [_f]*23
- F_list[2]=x_copy[ite][i]*Pmax*0.4
- psspy.induction_machine_chng(busNum,ID,I_list,F_list)
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- if Debug:
- print 'OPF motor load 2b'
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run the OPF
- postOPFinitialization(doci,all_inputs_init,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
- else:
- #economic dispatch
- EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- if np.any(np.array(EcdErrorCodes)!=0):
- print "Error in economic dispatch."
-
- for MotorName in MotorList:
- busNum = dico['Motors'][MotorName]['NUMBER']
- ID = dico['Motors'][MotorName]['ID']
- Mbase = dico['Motors'][MotorName]['MBASE']
- BaseCode = dico['Motors'][MotorName]['BASECODE']
- Pinit = dico['Motors'][MotorName]['P']
- Qinit = dico['Motors'][MotorName]['Q']
- if BaseCode==2: #max is in MVA
- PF = Pinit/((Pinit**2+Qinit**2)**0.5)
- Pmax = PF*Mbase
- else:
- Pmax = Mbase
- I_list = [_i]*9
- F_list = [_f]*23
- F_list[2]=x_copy[ite][i]*Pmax
- psspy.induction_machine_chng(busNum,ID,I_list,F_list)
- x2.append(x_copy[ite][i]) #store values sampled for logger function
- # Generator Law : Change generation level
- #if dico['Laws'][law]['ComponentType']=='Generator' and ('N_1' not in law) and ('out' not in law.lower()):
- if dico['Laws'][law]['ComponentType']=='Generator' and ('Availability' not in dico['Laws'][law]['Type']):
- GenList = dico['Laws'][law]['Generator']
- for GenName in GenList:
- busNum = dico['Generators'][GenName]['NUMBER']
- ID = dico['Generators'][GenName]['ID']
- Pmax = dico['Generators'][GenName]['PMAX']
- Pmin = dico['Generators'][GenName]['PMIN']
- if Pmin < 0 and abs(Pmin) > Pmax: #motor, not generator
- psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
- [x_copy[ite][i]*Pmin,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- else: #generator
- psspy.machine_chng_2(busNum,ID,[_i,_i,_i,_i,_i,_i],\
- [x_copy[ite][i]*Pmax,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- x2.append(x_copy[ite][i]) #store values sampled for logger function
- #Line or Transformer Availability Law: disconnect component if sample=0
- elif dico['Laws'][law]['ComponentType']=='Line' or dico['Laws'][law]['ComponentType']=='Transformer':
- compType = dico['Laws'][law]['ComponentType']
- CompList = dico['Laws'][law][compType]
-
- for Name in CompList:
- from_bus = dico[compType + 's'][Name]['FROMNUMBER']
- to_bus = dico[compType+ 's'][Name]['TONUMBER']
-
- ID = dico[compType+ 's'][Name]['ID']
- if compType=='Line':
- psspy.branch_chng(from_bus,to_bus,ID,[x_copy[ite][i],_i,_i,_i,_i,_i],\
- [_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- elif compType=='Transformer':
- if dico[compType+ 's'][Name]['#WIND']==2:
- i_args = [_i]*15
- i_args[0]=status
- f_args = [_f]*24
- c_args = [_s]*2
- psspy.two_winding_chng_4(from_bus,to_bus,ID,i_args,f_args,c_args)
- elif dico[compType+ 's'][Name]['#WIND']==3:
- three_bus = dico[compType + 's'][Name]['3NUMBER']
- i_args = [_i]*12
- i_args[7]=status
- f_args = [_f]*17
- c_args = [_s]*2
- psspy.three_wnd_imped_chng_3(from_bus,to_bus,three_bus,ID,i_args,f_args,c_args)
- x2.append(x_copy[ite][i]) #store values sampled for logger function
-
- #Generator or Load or Motor Availability Law: disconnect component if sample = 0
- #elif (dico['Laws'][law]['ComponentType']=='Generator' and ('N_1' in law or 'out' in law.lower())) or\
- # (dico['Laws'][law]['ComponentType']=='Load' and ('N_1' in law or 'out' in law.lower())) or\
- # (dico['Laws'][law]['ComponentType']=='Motor' and ('N_1' in law or 'out' in law.lower())):
- elif (dico['Laws'][law]['ComponentType']=='Generator' and ('Availability' in dico['Laws'][law]['Type'])) or\
- (dico['Laws'][law]['ComponentType']=='Load' and ('Availability' in dico['Laws'][law]['Type'])) or\
- (dico['Laws'][law]['ComponentType']=='Motor' and ('Availability' in dico['Laws'][law]['Type'])):
- compType = dico['Laws'][law]['ComponentType']
- CompList = dico['Laws'][law][compType]
-
- for Name in CompList:
- busNum = dico[compType + 's'][Name]['NUMBER']
- ID = dico[compType + 's'][Name]['ID']
- if compType=='Generator':
- psspy.machine_chng_2(busNum,ID,[x_copy[ite][i],_i,_i,_i,_i,_i],\
- [_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- elif compType=='Load':
- psspy.load_chng_4(busNum,ID,[x_copy[ite][i],_i,_i,_i,_i,_i],[_f,_f,_f,_f,_f,_f])
-
- elif compType=='Motor':
- psspy.induction_machine_chng(busNum,ID,[x_copy[ite][i],_i,_i,_i,_i,_i,_i,_i,_i],[_f]*23)
- x2.append(x_copy[ite][i]) #store values sampled for logger function
-
- #N-1 from file : systematic disconnection of a component
- else: #law='N_1_fromFile'
- if x_copy[ite][i]<0:
- x2.append("")
- pass
- elif x_copy[ite][i] < len(continLines) : # The drawn element is a line
-
- line_num=int(x_copy[ite][i])
- line_name=continLines[int(line_num)]
-
- from_bus=dico['Lines'][line_name]['FROMNUMBER']
- to_bus=dico['Lines'][line_name]['TONUMBER']
- br_id=dico['Lines'][line_name]['ID']
- psspy.branch_chng(from_bus,to_bus,br_id,[0,_i,_i,_i,_i,_i],\
- [ _f, _f, _f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- x2.append('Line '+str(from_bus)+'-'+str(to_bus)+'#'+str(br_id))
-
- elif x_copy[ite][i] < (len(continLines)+len(continGroups)) :
-
- group_num = int(x_copy[ite][i])-len(continLines)
- group_name = continGroups[int(group_num)]
- bus_num = dico['Generators'][group_name]['NUMBER']
- bus_id = dico['Generators'][group_name]['ID']
- psspy.machine_chng_2(int(bus_num),str(bus_id),[0,_i,_i,_i,_i,_i],\
- [_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f]) # Disconnect component
- psspy.opf_gendsp_indv(int(bus_num),str(bus_id),_i,0.0)
- x2.append('Group '+str(bus_num)+'#'+str(bus_id))
-
- elif x_copy[ite][i] < (len(continLines)+len(continGroups)+len(continTransfos)) :
- transfo_num=int(x_copy[ite][i])-len(continLines)-len(continGroups)
- transfo_name = continTransfos[int(transfo_num)]
- from_bus= dico['Transformers'][transfo_name]['FROMNUMBER']
- to_bus=dico['Transformers'][transfo_name]['TONUMBER']
- ID=dico['Transformers'][transfo_name]['ID']
-
- if dico['Transformers'][transfo_name]['#WIND']==2:
- i_args = [_i]*15
- i_args[0]=0
- f_args = [_f]*24
- c_args = [_s]*2
- psspy.two_winding_chng_4(from_bus,to_bus,ID,i_args,f_args,c_args)
- x2.append('Transfo '+str(from_bus)+'-'+str(to_bus)+'#'+str(ID))
-
- elif dico['Transformers'][transfo_name]['#WIND']==3:
- three_bus = dico['Transformers'][transfo_name]['3NUMBER']
- i_args = [_i]*12
- i_args[7]=0
- f_args = [_f]*17
- c_args = [_s]*2
- psspy.three_wnd_imped_chng_3(from_bus,to_bus,three_bus,ID,i_args,f_args,c_args)
- x2.append('Transfo '+str(from_bus)+'-'+str(to_bus)+'-'+str(three_bus)+'#'+str(ID))
-
- elif x_copy[ite][i] < (len(continLines)+len(continGroups)+len(continTransfos)+len(continLoads)) :
-
- load_num = int(x_copy[ite][i])-len(continLines)-len(continGroups)-len(continTransfos)
- load_name = continLoads[int(load_num)]
- bus_num = dico['Loads'][load_name]['NUMBER']
- ID = dico['Loads'][load_name]['ID']
- psspy.load_chng_4(int(bus_num),str(ID),[0,_i,_i,_i,_i,_i],[_f,_f,_f,_f,_f,_f]) # Disconnect component
- x2.append('Load '+str(bus_num)+'#'+str(ID))
-
- elif x_copy[ite][i] < (len(continLines)+len(continGroups)+len(continTransfos)+len(continLoads)+len(continMotors)) :
- motor_num = int(x_copy[ite][i])-len(continLines)-len(continGroups)-len(continTransfos)-len(continLoads)
- motor_name = continMotors[int(motor_num)]
- bus_num = dico['Motors'][motor_name]['NUMBER']
- ID = dico['Motors'][motor_name]['ID']
- psspy.induction_machine_chng(int(bus_num),str(ID),[0,_i,_i,_i,_i,_i,_i,_i,_i],[_f]*23) # Disconnect component
- x2.append('Motor '+str(bus_num)+'#'+str(ID))
- else :
- pass
-
- psspy.save(doci) #Saving .sav modifications
-
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- #save OPF data
- allbus=1
- include = [1,1,1,1] #isolated buses, out of service branches, subsystem data, subsystem tie lines
- out = 0 #out to file, not window
- # if psspy.bsysisdef(0):
- # sid = 0
- # else: # Select subsytem with all buses
- # sid = -1
- sid = 3
- RopFile = os.path.join(dico['doc_base'],"BaseCase.rop" )
- AlreadyRop=os.path.isfile(RopFile)
- if not AlreadyRop:
- ierr = psspy.rwop(sid,allbus,include,out,RopFile) #write rop file
-
- ok = True
-
- if Debug:
- print "Finished applying laws"
- loadShed = []
- fxshnt = []
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- xstrings = ['mvaact']
- ierr, xdata1 = psspy.aloadcplx(-1, 1, xstrings)
- istrings = ['number']
- ierr, idata = psspy.aloadint(-1, 1, istrings)
- cstrings = ['name']
- ierr, cdata = psspy.aloadchar(-1, 1, cstrings)
- bistrings = ['number']
- ierr, bidata1 = psspy.afxshuntint(-1, 1, bistrings)
- bxstrings = ['shuntnom']
- ierr, bxdata1 = psspy.afxshuntcplx(-1, 1, bxstrings)
- bcstrings = ['id']
- ierr, bcdata1 = psspy.afxshuntchar(-1, 1, bcstrings)
- #Unit commitment pass only valid for OPF (economic dispatch turns on and off generators)
- ##=========================================================================#
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- # First OPF to disconnect all generators at P=0
- if dico['UnitCommitment']:
- #increase load by reserve level so that after unit commitment there are enough groups to provide reserve
- GenDispatchData, DispTableData, LinCostTables, QuadCostTables, PolyCostTables, GenReserveData, PeriodReserveData,AdjBusShuntData,AdjLoadTables = readOPFdata(RopFile)
- ReserveFound=False
- TotalReserveLevel = 0
- AllReserveActive = []
- for num in range(1,16): #spinning reserve IDs 1 through 15 defined in OPF
- keyname = 'SpinningReserveID_'+str(int(num))
- if PSSEParams.has_key(keyname):
- ReserveID = PSSEParams[keyname]
- for PRD in PeriodReserveData:
- if PRD[0]==ReserveID:
- ReserveFound=True
- ReserveActive = PRD[3]
- ReserveLevel = PRD[1]
- AllReserveActive.append(ReserveActive)
- TotalReserveLevel += ReserveActive*ReserveLevel
- #print('Total Reserve = ', str(TotalReserveLevel))
- if ReserveFound and any(AllReserveActive):
- outputs = read_sav(doci)
- loads = outputs[4]
- total_load = 0
- for load in loads:
- total_load += load[1]
-
- x_with_reserve = (total_load + TotalReserveLevel)/total_load
- x_remove_reserve = 1.0/x_with_reserve
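- # (descriptive note) loads are scaled up by (total_load + reserve)/total_load so that
- # unit commitment keeps enough units online to also cover the spinning reserve;
- # x_remove_reserve undoes this scaling after commitment (see the load restoration further below).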
- for load in loads:
- busNum = load[0]
- ID = load[5]
- P = load[1]
- Q = load[2]
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_with_reserve*P,x_with_reserve*Q,_f,_f,_f,_f])
-
- #set Pmin so necessary units to supply reserve are not disconnected
- if ReserveCorrection:
- NoDisconnectionAllowedTotal = []
- for res in PeriodReserveData:
- ResNum = res[0]
- ResLevel = res[1]
- ResPeriod = res[2]
- InService = res[3]
- if InService == 0:
- continue
- ParticipatingUnits = res[4]
- ParticipatingUnitsFull = []
- NoDisconnectionAllowed = []
- for unit in ParticipatingUnits:
- busNum = unit[0]
- ID = unit[1]
-
- for gen in GenReserveData:
- busNum2 = gen[0]
- ID2 = gen[1]
- if busNum==busNum2 and ID == ID2:
- ramp =gen[2]
- #Pmax = gen[3]
- break
-
- for gen in GenDispatchData:
- busNum3 = gen[0]
- ID3 = gen[1]
- if busNum==busNum3 and ID == ID3:
- dispatch = gen[2]
- dispTable = gen[3]
- break
-
- for dTable in DispTableData:
- dispTable2 = dTable[0]
- if dispTable == dispTable2:
- PmaxTable = dTable[1]
- Pmax = PmaxTable #take Pmax from dispatch table to avoid errors
- PminTable = dTable[2]
- FuelCostScaleCoef = dTable[3]
- CurveType = dTable[4] #2 = piecewise linear
- Status = dTable[5]
- CostTable = dTable[6]
- break
-
- for table in LinCostTables:
- CostTable2 = table[0]
- if CostTable2==CostTable:
- numpoints = table[1]
- points = table[2]
- break
-
- MaxContribution = min(ResPeriod * ramp, Pmax)
-
- for i,[x_,y_] in enumerate(points):
- if x_ > Pmax:
- x1 = x_
- y1 = y_
- x0 = points[i-1][0]
- y0 = points[i-1][1]
- break
- y_i = (y1 - y0)*Pmax/(x1-x0)
-
- if Pmax > 0:
- CostCoef = y_i / Pmax
- else:
- #pdb.set_trace()
- CostCoef = 0
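- # (descriptive note) CostCoef = y_i/Pmax reduces to the slope (y1-y0)/(x1-x0) of the
- # piecewise-linear cost segment containing Pmax, i.e. an approximate marginal cost;
- # the participating units are sorted on it below, cheapest first.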
-
- ParticipatingUnitsFull.append([busNum, ID, Pmax, dispTable, MaxContribution, CostCoef])
-
- ParticipatingUnitsFull.sort(key=lambda d: d[-1], reverse=False)
- ReserveCapability = 0
-
- for unit in ParticipatingUnitsFull:
- MaxContribution = unit[4]
- if ReserveCapability >= ResLevel:
- break
- else:
- ReserveCapability += MaxContribution
- dispTable = unit[3]
- Pmax = unit[2]
- busNum = unit[0]
- ID = unit[1]
- NoDisconnectionAllowed.append([busNum, ID])
- Pmin = (DisconnectThreshhold*1.1)*Pmax
- psspy.opf_apdsp_tbl(dispTable,[_i,_i,_i],[_f, Pmin,_f])
-
- for grp in NoDisconnectionAllowed:
- if grp not in NoDisconnectionAllowedTotal:
- NoDisconnectionAllowedTotal.append(grp)
-
- else:
- pass
-
- #psspy.minimize_load_adjustments(int(dico['PSSEParams']['LOADSHEDDING_COST'])) #now apply load shedding
- #save new load levels to be able to initialize after opf run
- psspy.save(doci) #Saving .sav modifications
- all_inputs_base=read_sav(doci)
- loads_base=all_inputs_base[4]
- all_inputs_init_i =[]
- for h, inputs in enumerate(all_inputs_init):
- if h != 4:
- all_inputs_init_i.append(inputs)
- else:
- all_inputs_init_i.append(loads_base)
-
-
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- # First OPF to disconnect all generators at P=0
- if dico['UnitCommitment']:
-
- if Debug:
- print "principal OPF before unit commitment"
-
- loadShed = []
- fxshnt = []
-
- indexLS = []
- indexFS = []
-
- indicLS = 0
- indicFS = 0
-
- xstrings = ['mvaact']
- ierr, xdata1 = psspy.aloadcplx(-1, 1, xstrings)
- istrings = ['number']
- ierr, idata = psspy.aloadint(-1, 1, istrings)
- cstrings = ['name']
- ierr, cdata = psspy.aloadchar(-1, 1, cstrings)
-
- bistrings = ['number']
- ierr, bidata1 = psspy.afxshuntint(-1, 1, bistrings)
- bxstrings = ['shuntnom']
- ierr, bxdata1 = psspy.afxshuntcplx(-1, 1, bxstrings)
- bcstrings = ['id']
- ierr, bcdata1 = psspy.afxshuntchar(-1, 1, bcstrings)
-
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run OPF
-
- ok = False
- flagLS = 0
- flagFS = 0
-
- # solved() => check if last solution attempt reached tolerance
- # 0 = met convergence tolerance
- # 1 = iteration limit exceeded
- # 2 = blown up
- # 3 = terminated by non-divergent option
- # 4 = terminated by console input
- # 5 = singular jacobian matrix or voltage of 0.0 detected
- # 6 = inertial power flow dispatch error (INLF)
- # 7 = OPF solution met convergence tolerance (NOPF)
- # 8 does not exist ?
- # 9 = solution not attempted
-
- if psspy.solved() == 7 or psspy.solved()==0:
- pass
- else: #run OPF in loop to attempt convergence
- postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- MAX_OPF = 5 # maximum number of OPF runs to reach algorithm convergence
- for nbeOPF in range(0, MAX_OPF):
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run OPF
- if psspy.solved()==7 or psspy.solved()==0:
- break
- else:
- postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
-
- #treat status of OPF
- if psspy.solved() == 7 or psspy.solved()==0:
- ok = True
- elif psspy.solved() == 2:
- print "OPF diverged. (before Q control)"
- elif psspy.solved()== 3:
- print "Terminated by non-divergent option. (before unit commitment)"
- elif psspy.solved()== 4:
- print "Terminated by console input. (before Q control)"
- elif psspy.solved()== 5:
- print "Singular jacobian matrix or voltage of 0.0 detected. (before unit commitment)"
- elif psspy.solved()== 6:
- print "Inertial power flow dispatch error (INLF) (before unit commitment)."
- elif psspy.solved()== 8:
- print "Solution does not exist. (before unit commitment)"
- elif psspy.solved()== 9:
- print "Solution not attempted. (before unit commitment)"
- elif psspy.solved() == 1: #if iteration limit exceeded, try load flow
- print "Iteration limit exceeded (before unit commitment), trying load flow."
- # Newton-Raphson power flow calculation. Params:
- # tap adjustment flag (0 = disable / 1 = enable stepping / 2 = enable direct)
- # area interchange adjustment (0 = disable)
- # phase shift adjustment (0 = disable)
- # dc tap adjustment (1 = enable)
- # switched shunt adjustment (1 = enable)
- # flat start (0 = default / disabled, 1 = enabled), disabled because this is not a cold-start situation
- # var limit (default = 99, -1 = ignore limit, 0 = apply var limit immediately)
- # non-divergent solution (0 = disable)
- psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i])
- if psspy.solved() == 0:
- ok=True
- elif psspy.solved() == 2:
- print "Load flow diverged. (before unit commitment)"
- if ok:
- # Returns an "array of complex values for subsystem loads"
- ierr, xdata2 = psspy.aloadcplx(-1, 1, xstrings) # retrieve load MVA # returns a list of each load in complex format (P+Q)
-
- # aFxShuntInt: return an array of integer values for subsystem fixed shunts
- ierr, bidata2 = psspy.afxshuntint(-1, 1, bistrings)
-
- # aFxShuntCplx: return an array of complex values for subsystem fixed shunts
- ierr, bxdata2 = psspy.afxshuntcplx(-1, 1, bxstrings) # retrieve bus shunt MVar
-
- #Fixed shunt strings: return array of ids
- ierr, bcdata2 = psspy.afxshuntchar(-1, 1, bcstrings)
-
- # Extraction of the load shedding quantities
- for i in range(len(xdata2[0])):
- if np.real(xdata1)[0][i] != np.real(xdata2)[0][i]: # np.real returns the real part of the elements in the given array
- indexLS.append(i)
- flagLS = 1 # raise load-shedding flag
- try: # if / else would be better here ?
- flagLS
- except:
- flagLS = 0
- else:
- loadShed.append([position]) # Position seems to correspond to the number of the case we are treating
- loadShed[0].extend(['' for i in range(len(indexLS)-1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
- loadShed.append([idata[0][i] for i in indexLS])
- loadShed.append([cdata[0][i] for i in indexLS])
- loadShed.append([np.real(xdata1)[0][i] - np.real(xdata2)[0][i] for i in indexLS])
- loadShed.append([np.real(xdata2)[0][i] for i in indexLS])
- indicLS = sum(loadShed[3]) # sum all Effective MW loads
- loadShed = zip(*loadShed) # transpose the matrix
-
- # extraction adj. fixed shunt quantities
- if len(bidata1[0]) == len(bidata2[0]): # one first opf may have occurred...
- # so we check first if both vectors have the same length
-
- for i in range(len(bxdata2[0])):
- if np.imag(bxdata1)[0][i] != np.imag(bxdata2)[0][i]: # search for differences
- indexFS.append(i)
- flagFS = 1 # raise adjusted bus shunt flag
- try:
- flagFS
- except:
- flagFS = 0
- else:
- bxdata2[0] = [np.imag(bxdata2)[0][i] for i in indexFS] # fill output vector
- bidata2[0] = [bidata1[0][i] for i in indexFS]
- bcdata2[0] = [bcdata1[0][i] for i in indexFS]
- g = -1
- while (g <= len(bidata2)):
- g += 1
- try:
- #if fabs(bxdata2[0][g]) < 1: # discard value in ]-1,1[
- if fabs(bxdata2[0][g]) < 0.001: # discard value in ]-1,1[
- # pdb.set_trace()
- bxdata2[0].pop(g)
- bidata2[0].pop(g)
- bcdata2[0].pop(g)
- g -= 1
- except: pass
- if bxdata2[0] != []: # Get all fixed shunt buses
- fxshnt.append([position])
- fxshnt[0].extend(['' for i in range(len(bxdata2[0]) - 1)]) # Same here => maybe two lists or a dict would be a better choice
- fxshnt.append(bidata2[0])
- fxshnt.append(bxdata2[0])
- indicFS = sum(fxshnt[2])
- fxshnt = zip(*fxshnt) # transpose the matrix
- flagFS = 1
- else:
- flagFS = 0
-
- else: # if not the same length, bus data corresponding to the adjustable bus shunt has been added to the vector
- for i in range(len(bidata1[0])): # remove bus data for buses which were not added after the opf
- try:
- bxdata2[0].pop(bxdata2[0].index(bxdata1[0][i]))
- bidata2[0].pop(bidata2[0].index(bidata1[0][i]))
- bcdata2[0].pop(bcdata2[0].index(bcdata1[0][i]))
- except:
- pass
- g = -1
- bx = list(np.imag(bxdata2[0])) # retrieve Mvar
- while g <= len(bidata2):
- g += 1
- try:
- if fabs(bx[g]) < 1: # discard value in ]-1,1[
- bx.pop(g)
- bidata2[0].pop(g)
- g -= 1
- except: pass
- if bx != []:
- fxshnt.append([position])
- fxshnt[0].extend(['' for i in range(len(bidata2[0]) - 1)])
- fxshnt.append(bidata2[0])
- fxshnt.append(bx)
- indicFS = sum(fxshnt[2])
- fxshnt = zip(*fxshnt)
- flagFS = 1
- else:
- flagFS = 0
-
-
- if PSSEParams['SAVE_CASE_BEFORE_UNIT_COMMITMENT']:
- psspy.save(doci_beforeUC)
- all_inputs = read_sav(doci_beforeUC)
- psspy.save(doci)
- all_inputs = read_sav(doci)
-
- buses = all_inputs[0]
- lines = all_inputs[1]
- transf = all_inputs[2]
- plants = all_inputs[3]
- loads = all_inputs[4]
- shunt = all_inputs[5]
- motors = all_inputs[6]
- transf3 = all_inputs[7]
- swshunt = all_inputs[8]
-
-
- gen_UC_list = []
- for item in plants:
- bus = item[0]
- status = item[1]
- _id = item[2]
- pgen = item[3]
- qgen = item[4]
- pmax = item[6]
- name = item[7]
- machine_type = item[11]
-
- #and if a conventional generating unit as specified in Machines tab of PSSE
- if machine_type == 0:
- if abs(pgen) <= pmax*DisconnectThreshhold:
- if status==1:
- #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
- # disconnect the plant
- pgen=0
- qgen=0
- status = 0
- psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- gen_UC_list.append((bus,_id))
- elif machine_type==1: #renewable generator fixed Q limits
- if abs(pgen) <= pmax*0.2 and DEWA_PV_Qlimits: #change q limits if P renewable is < 20% Pmax (DEWA grid code)
- if status==1:
- qmin = -0.04*pmax
- qmax = 0.04*pmax
- qgen=min(qmax,qgen)
- qgen=max(qmin,qgen)
- psspy.machine_chng_2(bus, _id, [_i,_i,_i,_i,_i,_i],[_f,qgen,qmax,qmin,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
- if status==1:
- #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
- # disconnect the plant
- pgen=0
- qgen=0
- status = 0
- psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- gen_UC_list.append((bus,_id))
- elif machine_type==2: #renewable generator with cos phi control
- if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
- if status==1:
- #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
- # disconnect the plant
- pgen=0
- qgen=0
- status = 0
- psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- gen_UC_list.append((bus,_id))
- elif machine_type==3: #renewable generator with fixed Q based on cos phi control
- if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
- if status==1:
- #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
- # disconnect the plant
- pgen=0
- qgen=0
- status = 0
- psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- gen_UC_list.append((bus,_id))
- elif machine_type==4: #infeed machine that's still considered renewable
- if abs(pgen) <= pmax*0.005 and Disconnect_RES: #disconnect if very low P
- if status==1:
- #print('P < 5% of Pmax and Q > 0 at bus ' + str(bus) + ' gen ' + str(_id) + '--> generator disconnected.')
- # disconnect the plant
- pgen=0
- qgen=0
- status = 0
- psspy.machine_chng_2(bus, _id, [status,_i,_i,_i,_i,_i],[pgen,qgen,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f,_f])
- gen_UC_list.append((bus,_id))
- # 3. Display Y
- sizeY4 = len(shunt)
- y_before = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2*sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5 + 3 * sizeY7)
- z_before = [0]*13 # list of 13 aggregate indicators, initialized to zero
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac_before = np.zeros(sizeY0)
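- # (descriptive note) layout of y_before, matching the fill loops further below:
- # [P_mach | Q_mach | V_bus | I_line | P_line | Q_line | I_tr2w | P_tr2w | Q_tr2w |
- # I_tr3w | P_tr3w | Q_tr3w | P_load | P_motor | Q_motor | Q_fixed_shunt | Q_sw_shunt]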
- if ok:
- # Creates the quantities of interest
- for i in range (sizeY2) :
- if lines [i][rate_mat_index]>100 :
- z_before[0]+=1 # Number of lines above 100% of their limits
- for i in range (sizeY5) :
- if transf [i][rate_mat_index]>100 :
- z_before[1]+=1 # Number of transformers above 100% of their limits
- for i in range (sizeY7) :
- if transf3 [i][rate_mat_index_3w]>100 :
- z_before[1]+=1 # Number of transformers above 100% of their limits (each winding of a 3 winding counted)
-
- for i in range (sizeY1):
- if buses[i][2]>buses[i][5] :
- z_before[2]+=1
- if buses[i][2]<buses[i][4] :
- z_before[2]+=1 # Number of buses outside of their voltage limits
- for i in range (sizeY0) :
- z_before[3]+=float(plants[i][3]) # Total active production
- for i in range (sizeY3) :
- z_before[4]+=float(loads[i][1]) # Total active consumption
- for i in range (sizeY6):
- z_before[4]+=float(motors[i][1]) # Add motors to total active consumption
- z_before[5]=(z_before[3]-z_before[4])/z_before[3]*100 # Active power losses
- for i in range (sizeY2) :
- if lines [i][rate_mat_index]>z_before[6] :
- z_before[6]=lines[i][rate_mat_index] # Max flow in lines
- for i in range (sizeY5) :
- if transf [i][rate_mat_index]>z_before[7] :
- z_before[7]=transf[i][rate_mat_index] # Max flow in transformers
- for i in range (sizeY7) :
- if transf3 [i][rate_mat_index_3w]>z_before[7] :
- z_before[7]=transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
-
- for i in range (sizeY2) :
- if lines [i][rate_mat_index]>90 :
- z_before[8]+=1
- z_before[8]=z_before[8]-z_before[0] # Number of lines between 90% and 100% of their limits
- for i in range (sizeY5) :
- if transf [i][rate_mat_index]>90 :
- z_before[9]+=1
- for i in range (sizeY7) :
- if transf3 [i][rate_mat_index_3w]>90 :
- z_before[9]+=1
-
- z_before[9]=z_before[9]-z_before[1] # Number of transformers between 90% and 100% of their limits
-
- z_before[10]=indicFS
-
- z_before[11]=indicLS
-
- z_before[12] = str(gen_UC_list)
-
- # Creates the output vectors
- for Pmach in range (sizeY0):
- y_before[Pmach]=float(plants[Pmach][3])
- Ymac_before[Pmach]=float(plants[Pmach][3])
- for Qmach in range (sizeY0):
- y_before[Qmach+sizeY0]=float(plants[Qmach][4])
- for Vbus in range (sizeY1):
- y_before[Vbus+2*sizeY0]=float(buses[Vbus][2])
- for Iline in range (sizeY2):
- y_before[Iline+2*sizeY0+sizeY1]=float(lines[Iline][rate_mat_index])
- for Pline in range (sizeY2):
- y_before[Pline+2*sizeY0+sizeY1+sizeY2]=float(lines[Pline][6])
- for Qline in range (sizeY2):
- y_before[Qline+2*sizeY0+sizeY1+2*sizeY2]=float(lines[Qline][7])
- for Itrans in range (sizeY5):
- y_before[Itrans+2*sizeY0+sizeY1+3*sizeY2]=float(transf[Itrans][rate_mat_index])
- for Ptrans in range (sizeY5):
- y_before[Ptrans+2*sizeY0+sizeY1+3*sizeY2+sizeY5]=float(transf[Ptrans][6])
- for Qtrans in range (sizeY5):
- y_before[Qtrans+2*sizeY0+sizeY1+3*sizeY2+2*sizeY5]=float(transf[Qtrans][7])
- for Itrans in range (sizeY7):
- y_before[Itrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5]=float(transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range (sizeY7):
- y_before[Ptrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+sizeY7]=float(transf3[Ptrans][8])
- for Qtrans in range (sizeY7):
- y_before[Qtrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+2*sizeY7]=float(transf3[Qtrans][9])
- for Pload in range (sizeY3) :
- y_before[Pload+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7]=float(loads[Pload][1])
- for Pmotor in range (sizeY6) :
- y_before[Pmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3]=float(motors[Pmotor][1])
- for Qmotor in range (sizeY6) :
- y_before[Qmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+sizeY6]=float(motors[Qmotor][2])
- for Qshunt in range (sizeY4) :
- y_before[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6]=float(shunt[Qshunt][2])
- for Qshunt in range (sizeY8) :
- y_before[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6+sizeY4]=float(swshunt[Qshunt][4])
- nz = len(z_before)
-
- else :
- print 'NON CONVERGENCE BEFORE UNIT COMMITMENT CASE '+str(position)+' CORE '+str(num_pac)
- if TStest==1:
- MyLogger(x2, y_before, z_before, dico['logCSVfilename_UC'][num_pac], timeVect[ite])
- else:
- MyLogger(x2, y_before, z_before, dico['logCSVfilename_UC'][num_pac], position)
-
- #re-initialize OPF for post-unit commitment
- postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- all_inputs = read_sav(doci)
- loads = all_inputs[4]
-
- #return load to original level post spinning reserve correction for unit commitment
- for num in range(1,16):
- keyname = 'SpinningReserveID_' + str(int(num))
- if PSSEParams.has_key(keyname):
- ReserveID = PSSEParams[keyname]
- ReserveFound=False
- AllReserveActive = []
- for PRD in PeriodReserveData:
- if PRD[0]==ReserveID:
- ReserveFound=True
- ReserveActive = PRD[3]
- AllReserveActive.append(ReserveActive)
- if ReserveFound and any(AllReserveActive):
- for load in loads:
- busNum = load[0]
- ID = load[5]
- P = load[1]
- Q = load[2]
- psspy.load_chng_4(busNum,ID,[_i,_i,_i,_i,_i,_i],[x_remove_reserve*P,x_remove_reserve*Q,_f,_f,_f,_f])
- psspy.save(doci)
- else:
- break
-
- #store loadshedding and added MVAR values for before UC
- loadShed_beforeUC = loadShed
- fxshnt_beforeUC = fxshnt
- indexLS_beforeUC = indexLS
- indexFS_beforeUC = indexFS
-
- indicLS_beforeUC = indicLS
- indicFS_beforeUC = indicFS
-
-
-
- # Unit commitment pass only valid for OPF (economic dispatch turns on and off generators)
- ##=========================================================================#
- # NB: we actually DO two passes: one pass, then we look at the units with P==0 and Q!=0,
- # disconnect them and rerun the OPF.
- # And we log: the % of cases where this happens,
- # how much reactive power is needed on average,
- # and which machines do this
- loadShed = []
- fxshnt = []
- indexLS = []
- indexFS = []
- indicLS = 0
- indicFS = 0
- flagLS = 0
- flagFS = 0
- ok = False
-
- xstrings = ['mvaact']
- ierr, xdata1 = psspy.aloadcplx(-1, 1, xstrings)
- istrings = ['number']
- ierr, idata = psspy.aloadint(-1, 1, istrings)
- cstrings = ['name']
- ierr, cdata = psspy.aloadchar(-1, 1, cstrings)
- bistrings = ['number']
- ierr, bidata1 = psspy.afxshuntint(-1, 1, bistrings)
- bxstrings = ['shuntnom']
- ierr, bxdata1 = psspy.afxshuntcplx(-1, 1, bxstrings)
- bcstrings = ['id']
- ierr, bcdata1 = psspy.afxshuntchar(-1, 1, bcstrings)
-
- if Debug:
- print "got to principal OPF/LF"
-
- #Solve OPF
- if PSSEParams['ALGORITHM']=='Optimum Power Flow':
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run OPF
- flagLS = 0
- flagFS = 0
- ok = False
- #psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i]) # Load flow Newton Raphson
-
- # solved() => check if last solution attempt reached tolerance
- # 0 = met convergence tolerance
- # 1 = iteration limit exceeded
- # 2 = blown up
- # 3 = terminated by non-divergent option
- # 4 = terminated by console input
- # 5 = singular jacobian matrix or voltage of 0.0 detected
- # 6 = inertial power flow dispatch error (INLF)
- # 7 = OPF solution met convergence tolerance (NOPF)
- # 8 does not exist ?
- # 9 = solution not attempted
- if psspy.solved() == 7 or psspy.solved()==0:
- pass
- else: #run OPF in loop to attempt convergence
- postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
- MAX_OPF = 5 # maximum number of OPF runs to reach algorithm convergence
- for nbeOPF in range(0, MAX_OPF):
- psspy.bsys(3,0,[0.0,0.0],0,[],1,[1],0,[],0,[])
- psspy.set_opf_report_subsystem(3,0)
- psspy.nopf(0,1) # Run OPF
- if psspy.solved()==7 or psspy.solved()==0:
- break
- else:
- postOPFinitialization(doci,all_inputs_init_i,AdjLoadTables,init_gen=False,init_bus=False,init_fxshnt=True,init_swshnt=False,init_load=True,init_P0=False)
-
- #treat OPF status code
- if psspy.solved() == 7 or psspy.solved() == 0:
- ok = True
- elif psspy.solved() == 2:
- print "OPF diverged."
- elif psspy.solved()== 3:
- print "Terminated by non-divergent option."
- elif psspy.solved()== 4:
- print "Terminated by console input."
- elif psspy.solved()== 5:
- print "Singular jacobian matrix or voltage of 0.0 detected."
- elif psspy.solved()== 6:
- print "Inertial power flow dispatch error (INLF)."
- elif psspy.solved()== 8:
- print "Solution does not exist."
- elif psspy.solved()== 9:
- print "Solution not attempted."
- elif psspy.solved() == 1: #if iteration limit exceeded, try load flow
- print "Iteration limit exceeded, trying load flow (CASE " + str(ite) + ")."
- # Newton-Raphson power flow calculation. Params:
- # tap adjustment flag (0 = disable / 1 = enable stepping / 2 = enable direct)
- # area interchange adjustment (0 = disable)
- # phase shift adjustment (0 = disable)
- # dc tap adjustment (1 = enable)
- # switched shunt adjustment (1 = enable)
- # flat start (0 = default / disabled, 1 = enabled), disabled because this is not a cold-start situation
- # var limit (default = 99, -1 = ignore limit, 0 = apply var limit immediately)
- # non-divergent solution (0 = disable)
- psspy.fnsl([0, _i, 0, 0, 0, 0, _i,_i])
- if psspy.solved() == 0:
- ok=True
- elif psspy.solved() == 2:
- print "Load flow diverged"
- # else:
- # #PlimitList = []
- # #QlimitList = []
- # if Debug:
- # print "Got to principal economic dispatch"
- # #economic dispatch
- # EcdErrorCodes, LFcode, Plimit, Qlimit = EconomicDispatch(doci, ecd_file, LossesRatio, TapChange)
- # #PlimitList.append(Plimit)
- # #QlimitList.append(Qlimit)
- # if Debug:
- # print "Ran principal economic dispatch"
- # if np.any(np.array(EcdErrorCodes)!=0):
- # print "Error in economic dispatch."
- # elif LFcode != 0:
- # print "Load flow did not converge"
- # else:
- # ok = True
- #
- # if Plimit == True:
- # print "Swing generator exceeds active power limits after economic dispatch."
- # if Qlimit == True:
- # print "Swing generator exceeds reactive power limits after economic dispatch."
- if ok==True:
- ierr, xdata2 = psspy.aloadcplx(-1, 1, xstrings) # retrieve load MVA # returns a list of each load in complex format (P+Q)
- ierr, bidata2 = psspy.afxshuntint(-1, 1, bistrings)
- ierr, bxdata2 = psspy.afxshuntcplx(-1, 1, bxstrings) # retrieve bus shunt MVar
- ierr, bcdata2 = psspy.afxshuntchar(-1, 1, bcstrings)
- # Extraction of the load shedding quantities
- for i in range(len(xdata2[0])):
- if np.real(xdata1)[0][i] != np.real(xdata2)[0][i]: # np.real returns the real part of the elements in the given array
- indexLS.append(i)
- flagLS = 1 # raise load-shedding flag
- try: # if / else would be better here ?
- flagLS
- except:
- flagLS = 0
- else:
- loadShed.append([position]) # Position seems to correspond to the number of the case we are treating
- loadShed[0].extend(['' for i in range(len(indexLS)-1)]) # why [0] ? Maybe it would be better to have 2 lists ? Or a dict ?
- loadShed.append([idata[0][i] for i in indexLS])
- loadShed.append([cdata[0][i] for i in indexLS])
- loadShed.append([np.real(xdata1)[0][i] - np.real(xdata2)[0][i] for i in indexLS]) #loadShed[3]
- loadShed.append([np.real(xdata2)[0][i] for i in indexLS]) #loadShed[4]
- indicLS = sum(loadShed[3]) # sum all Effective MW loads #sum(loadShed[3])
- loadShed = zip(*loadShed) # transpose the matrix
-
- # extraction adj. fixed shunt quantities
- if len(bidata1[0]) == len(bidata2[0]): # one first opf may have occurred...
- # so we check first if both vectors have the same length
-
- for i in range(len(bxdata2[0])):
- if np.imag(bxdata1)[0][i] != np.imag(bxdata2)[0][i]: # search for differences
- indexFS.append(i)
- flagFS = 1 # raise adjusted bus shunt flag
- try:
- flagFS
- except:
- flagFS = 0
- else:
- bxdata2[0] = [np.imag(bxdata2)[0][i] for i in indexFS] # fill output vector
- bidata2[0] = [bidata1[0][i] for i in indexFS]
- bcdata2[0] = [bcdata1[0][i] for i in indexFS]
- g = -1
- while (g <= len(bidata2)):
- g += 1
- try:
- #if fabs(bxdata2[0][g]) < 1: # discard value in ]-1,1[
- if fabs(bxdata2[0][g]) < 0.001: # discard value in ]-1,1[
- # pdb.set_trace()
- bxdata2[0].pop(g)
- bidata2[0].pop(g)
- bcdata2[0].pop(g)
- g -= 1
- except: pass
- if bxdata2[0] != []: # Get all fixed shunt buses
- fxshnt.append([position])
- fxshnt[0].extend(['' for i in range(len(bxdata2[0]) - 1)]) # Same here => maybe two lists or a dict would be a better choice
- fxshnt.append(bidata2[0])
- fxshnt.append(bxdata2[0])
- indicFS = sum(fxshnt[2])
- fxshnt = zip(*fxshnt) # transpose the matrix
- flagFS = 1
- else:
- flagFS = 0
-
- else: # if not the same length, bus data corresponding to the adjusted bus shunt has been added to the vector
- for i in range(len(bidata1[0])): # remove bus data for buses which were not added after the opf
- try:
- bxdata2[0].pop(bxdata2[0].index(bxdata1[0][i]))
- bidata2[0].pop(bidata2[0].index(bidata1[0][i]))
- bcdata2[0].pop(bcdata2[0].index(bcdata1[0][i]))
- except:
- pass
- g = -1
- bx = list(np.imag(bxdata2[0])) # retrieve Mvar
- while g <= len(bidata2):
- g += 1
- try:
- if fabs(bx[g]) < 1: # discard value in ]-1,1[
- bx.pop(g)
- bidata2[0].pop(g)
- g -= 1
- except: pass
- if bx != []:
- fxshnt.append([position])
- fxshnt[0].extend(['' for i in range(len(bidata2[0]) - 1)])
- fxshnt.append(bidata2[0])
- fxshnt.append(bx)
- indicFS = sum(fxshnt[2])
- fxshnt = zip(*fxshnt)
- flagFS = 1
- else:
- flagFS = 0
-
-
- psspy.save(doci)
- all_inputs=read_sav(doci)
- buses = all_inputs[0]
- lines = all_inputs[1]
- transf = all_inputs[2]
- plants = all_inputs[3]
- loads = all_inputs[4]
- shunt = all_inputs[5]
- motors = all_inputs[6]
- transf3=all_inputs[7]
- swshunt = all_inputs[8]
-
- #pdb.set_trace()
-
- # 3. Display Y
- sizeY4 = len(shunt)
- y = np.zeros(2 * sizeY0 + sizeY1 + 3 * sizeY2 + sizeY3 + 2*sizeY6 + sizeY4 + sizeY8 + 3 * sizeY5+ 3 * sizeY7)
- z = np.zeros(12+ 2*int(PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow')) # np.zeros returns a new array of the given shape and type filled with zeros
- rate_mat_index = Irate_num + 2
- rate_mat_index_3w = Irate_num + 4
- Ymac = np.zeros(sizeY0)
- if ok:
- # Creates the quantities of interest
- for i in range (sizeY2) :
- if lines [i][rate_mat_index]>100 :
- z[0]+=1 # Number of lines above 100% of their limits
- for i in range (sizeY5) :
- if transf [i][rate_mat_index]>100 :
- z[1]+=1 # Number of transformers above 100% of their limits
- for i in range (sizeY7) :
- if transf3 [i][rate_mat_index_3w]>100 :
- z[1]+=1 # Add number of 3w transformers above 100% of their limits
- for i in range (sizeY1):
- if buses[i][2]>buses[i][5] :
- z[2]+=1
- if buses[i][2]<buses[i][4] :
- z[2]+=1 # Number of buses outside of their voltage limits
- for i in range (sizeY0) :
- z[3]+=float(plants[i][3]) # Total active production
- for i in range (sizeY3) :
- z[4]+=float(loads[i][1]) # Total active consumption
- for i in range (sizeY6) :
- z[4]+=float(motors[i][1]) # add total active consumption from motors
- z[5]=(z[3]-z[4])/z[3]*100 # Active power losses
- for i in range (sizeY2) :
- if lines [i][rate_mat_index]>z[6] :
- z[6]=lines[i][rate_mat_index] # Max flow in lines
- for i in range (sizeY5) :
- if transf [i][rate_mat_index]>z[7] :
- z[7]=transf[i][rate_mat_index] # Max flow in transformers
- for i in range (sizeY7) :
- #pdb.set_trace()
- if transf3 [i][rate_mat_index_3w]>z[7] :
- z[7]=transf3[i][rate_mat_index_3w] # Max flow in 3w transformers
- for i in range (sizeY2) :
- if lines [i][rate_mat_index]>90 :
- z[8]+=1
- z[8]=z[8]-z[0] # Number of lines between 90% and 100% of their limits
- for i in range (sizeY5) :
- if transf [i][rate_mat_index]>90 :
- z[9]+=1
- for i in range (sizeY7) :
- if transf3 [i][rate_mat_index_3w]>90 :
- z[9]+=1
- z[9]=z[9]-z[1] # Number of transformers between 90% and 100% of their limits
-
- z[10]=indicFS
-
- z[11]=indicLS
-
- # if PSSEParams['ALGORITHM']=='Economic Dispatch and Power Flow':
- # z[12] = int(Plimit)
- # z[13] = int(Qlimit)
-
- # Creates the output vectors
- for Pmach in range (sizeY0):
- y[Pmach]=float(plants[Pmach][3])
- Ymac[Pmach]=float(plants[Pmach][3])
- for Qmach in range (sizeY0):
- y[Qmach+sizeY0]=float(plants[Qmach][4])
- for Vbus in range (sizeY1):
- y[Vbus+2*sizeY0]=float(buses[Vbus][2])
- for Iline in range (sizeY2):
- y[Iline+2*sizeY0+sizeY1]=float(lines[Iline][rate_mat_index])
- for Pline in range (sizeY2):
- y[Pline+2*sizeY0+sizeY1+sizeY2]=float(lines[Pline][6])
- for Qline in range (sizeY2):
- y[Qline+2*sizeY0+sizeY1+2*sizeY2]=float(lines[Qline][7])
- for Itrans in range (sizeY5):
- y[Itrans+2*sizeY0+sizeY1+3*sizeY2]=float(transf[Itrans][rate_mat_index])
- for Ptrans in range (sizeY5):
- y[Ptrans+2*sizeY0+sizeY1+3*sizeY2+sizeY5]=float(transf[Ptrans][6])
- for Qtrans in range (sizeY5):
- y[Qtrans+2*sizeY0+sizeY1+3*sizeY2+2*sizeY5]=float(transf[Qtrans][7])
- for Itrans in range (sizeY7):
- y[Itrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5]=float(transf3[Itrans][rate_mat_index_3w])
- for Ptrans in range (sizeY7):
- y[Ptrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+sizeY7]=float(transf3[Ptrans][8])
- for Qtrans in range (sizeY7):
- y[Qtrans+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+2*sizeY7]=float(transf3[Qtrans][9])
- for Pload in range (sizeY3) :
- y[Pload+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7]=float(loads[Pload][1])
- for Pmotor in range (sizeY6) :
- y[Pmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3]=float(motors[Pmotor][1])
- for Qmotor in range (sizeY6) :
- y[Qmotor+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+sizeY6]=float(motors[Qmotor][2])
- for Qshunt in range (sizeY4) :
- y[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6]=float(shunt[Qshunt][2])
- for Qshunt in range (sizeY8) :
- y[Qshunt+2*sizeY0+sizeY1+3*sizeY2+3*sizeY5+3*sizeY7+sizeY3+2*sizeY6+sizeY4]=float(swshunt[Qshunt][4])
-
- nz = len(z)
-
- else :
- print ('NON CONVERGENCE CASE '+str(position)+' CORE '+str(num_pac))
-
- if dico['UnitCommitment']:
- Output_beforeUC.append(z_before)#append the output
- Pmachine_beforeUC.append(Ymac_before)
- LS_beforeUC.append(indicLS_beforeUC)
- FS_beforeUC.append(indicFS_beforeUC)
- LStable_beforeUC.extend(loadShed_beforeUC)
- FStable_beforeUC.extend(fxshnt_beforeUC)
-
- Output.append(z)#append the output
- Pmachine.append(Ymac)
- LS.append(indicLS)
- FS.append(indicFS)
- LStable.extend(loadShed)
- FStable.extend(fxshnt)
-
- if TStest==1:
- MyLogger(x2,y,z,logCSVfilename[num_pac],timeVect[ite])
- else:
- MyLogger(x2,y,z,logCSVfilename[num_pac],position) #for each iteration write in the CSV
-
-## if dico['TStest']==1:
-## sys.stdout.close()
- return inputSample, Output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
-
-## except Exception,e:
-## print e
-## a=[]
-## return a
-
-def create_dist(dico):
-
- NumLaws = len(dico['Laws']) + int(dico['N_1_fromFile'])
-
- #Create a correlation matrix as copulas
- CorrMatrixNames = dico['CorrMatrix']['laws']
- CorrMatrix = dico['CorrMatrix']['matrix']
- corr=CorrelationMatrix(NumLaws)#Openturns
-
- # Create a collection of the marginal distributions
- collectionMarginals = DistributionCollection(NumLaws)#Openturns
-
- distributionX = []
- for i,key in enumerate(CorrMatrixNames):
- data, [time_serie, time_serie_file] = getUserLaw(dico['Laws'][key])
- distributionX.append( data )
- collectionMarginals[i] = Distribution(data)
-
- #add N_1 components entered as Files
- if dico['N_1_fromFile']==True:
- continTuples = []
- for j in range(len(dico['continVal'])):
- continTuples.append((dico['continVal'][j],dico['continProb'][j]))
- data = getUserDefined(continTuples)
- distributionX.append(data)
- collectionMarginals[i+1] = Distribution(data)
- CorrMatrixNames.append('N_1_fromFile')
- CorrMatrixEx = np.hstack((CorrMatrix, np.zeros((NumLaws-1,1)))) #assume no correlation between N-1 and other laws
- LastLine = np.hstack((np.zeros((1,NumLaws-1)),np.ones((1,1))))
- CorrMatrixEx = np.vstack((CorrMatrixEx, LastLine))
- CorrMatrix = CorrMatrixEx
- (Nrows, Ncols) = np.shape(CorrMatrixEx)
- else:
- (Nrows, Ncols) = np.shape(CorrMatrix)
- for i in range(Nrows):
- for j in range(Ncols):
- corr[i,j]=CorrMatrix[i,j]
-
- corr2= NormalCopula.GetCorrelationFromSpearmanCorrelation(corr)
- copula=Copula(NormalCopula(corr2))
- #copula=Copula(NormalCopula(corr))
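- # (note) the user-entered correlation matrix is interpreted as a Spearman rank correlation;
- # GetCorrelationFromSpearmanCorrelation converts it to the equivalent Pearson correlation
- # parameter of the underlying normal copula.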
-
- # Create the input probability distribution, args are the distributions, the correlation laws
- inputDistribution = ComposedDistribution(collectionMarginals, copula)
-
- return inputDistribution
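-# Minimal usage sketch (illustrative; mirrors Calculation() below): the returned
-# ComposedDistribution can be sampled with an OpenTURNS Monte Carlo experiment:
-#   inputDistribution = create_dist(dico)
-#   myMCE = MonteCarloExperiment(inputDistribution, dico['lenpac'])
-#   inputSamp = myMCE.generate()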
-
-def Calculation(dico,data1,msg):
-
-
- os.chdir(dico['doc_base']) #to work in correct directory
-## sys.stdout=open('process num'+str(os.getpid())+'_package '+\
-## str(dico['num_pac'])+'.out','w')
- #pdb.set_trace()
- flag2=dico['flag2']
- inputDistribution=create_dist(dico) #create new distribution
- #initialization
- LStable=[]
- FStable=[]
- output=[]
- inputSample=[]
- Pmachine=[]
-
- LStable_beforeUC=[]
- FStable_beforeUC=[]
- output_beforeUC=[]
- Pmachine_beforeUC=[]
-
- outputSampleAll=NumericalSample(0,12 + 2*int(dico['PSSEParams']['ALGORITHM']=='Economic Dispatch and Power Flow'))
-
- RandomGenerator.SetSeed(os.getpid())
- Message=msg.get()
- print(Message+'=======OK')
-
-
- while(Message !='stop'):
- myMCE = MonteCarloExperiment(inputDistribution,dico['lenpac']) #create new sample
- inputSamp = myMCE.generate()
-
- try:
- Message=msg.get(block=False)
- if Message=='stop': break
- except:
- pass
- res=PSSEFunct(dico.copy(),inputSamp) #launch PSSEFunct (OPF)
- #subprocess.Popen(['c:/python34/python.exe','PFfunction.py'])
- dico['position']+=dico['lenpac']
- # 0 1 2 3 4 5 6
- #inputSample, Output, Pmachine, LS, FS, LStable, FStable,
- # 7 8 9 10 11 12
- #Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
- for result in res[1]:
- outputSampleAll.add(NumericalPoint(result)) #create a Numerical Sample variable
-
- if (flag2):
- LS=(np.mean(res[3])) #mean per package
- FS=(np.mean(res[4])) #mean per package
- z=[LS,FS]
- data1.put(z)
- sleep(1)
-
- #if criteria on nbeTension and NbeTransit
- else:
- NbeTransit=(float(NumericalPoint(1,outputSampleAll.computeMean()[0])[0])) #mean per package
- NbeTension=(float(NumericalPoint(1,outputSampleAll.computeMean()[1])[0]))
- z=[NbeTransit,NbeTension]
- data1.put(z)
- sleep(1)
-
- inputSample.extend(res[0])
-
- LStable.extend(res[5])
- FStable.extend(res[6])
- output.extend(res[1])
- Pmachine.extend(res[2])
-
- LStable_beforeUC.extend(res[11])
- FStable_beforeUC.extend(res[12])
- output_beforeUC.extend(res[7])
- Pmachine_beforeUC.extend(res[8])
-
- if msg.empty():
- Message = "empty"
- else:
- Message=msg.get(block=True,timeout=2)
- print 'MSG is '+str(Message)+' time: '+str(strftime("%Hh%Mm%S", gmtime()))
-
-# sys.stdout.close()
-
- ## #write summary tables for before UC
- if dico['UnitCommitment']:
- f=open(dico['logCSVfilename_UC'][dico['num_pac']],'a')
- f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
- f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
- for i in range(max(len(LStable_beforeUC),len(FStable_beforeUC))):
- try:
- f.write('{0};;{1};{2};{3};{4}'.format(LStable_beforeUC[i][0],LStable_beforeUC[i][1]\
- ,LStable_beforeUC[i][2],LStable_beforeUC[i][3],LStable_beforeUC[i][4]))
- except:
- f.write(';;;;;')
-
- try:
- f.write(';;;{0};;{1};{2} \n'.format(FStable_beforeUC[i][0],FStable_beforeUC[i][1],FStable_beforeUC[i][2]))
- except:
- f.write('\n')
- f.write("\n\n")
- f.close()
-
- ## #write summary tables
- f=open(dico['logCSVfilename'][dico['num_pac']],'a')
- f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
- f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
- for i in range(max(len(LStable), len(FStable))):
- try:
- f.write('{0};;{1};{2};{3};{4}'.format(LStable[i][0],LStable[i][1]\
- ,LStable[i][2],LStable[i][3],LStable[i][4]))
- except:
- f.write(';;;;;')
- try:
- f.write(';;;{0};;{1};{2} \n'.format(FStable[i][0],FStable[i][1],FStable[i][2]))
- except:
- f.write('\n')
- f.write("\n\n")
- f.close()
-
-
-
- return output, inputSample,Pmachine
-
-class NonBlockingStreamReader(): #class object to read in a stdout process
-
- def __init__(self, stream):
- '''
- stream: the stream to read from.
- Usually a process' stdout or stderr.
- '''
-
- self._s = stream
- self._q = Queue()
-
- def _populateQueue(stream, queue):
- '''
- Collect lines from 'stream' and put them in 'queue'.
- '''
-
- while True:
- line = stream.read()
- if line:
- queue.put(line)
- else:
- pass
-
- self._t = Thread(target = _populateQueue,
- args = (self._s, self._q))
- self._t.daemon = True
- self._t.start() #start collecting lines from the stream
-
- def readline(self, timeout = None):
- try:
- return self._q.get(block = timeout is not None,
- timeout = timeout)
- except Empty:
- return None
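-# Usage sketch (mirrors Convergence() below): wrap a subprocess' stdout so that reads
-# never block the polling loop:
-#   p = subprocess.Popen(['python', cmd_Path], stdout=subprocess.PIPE)
-#   nbsr = NonBlockingStreamReader(p.stdout)
-#   line = nbsr.readline(0.1)  # returns None if nothing arrives within 0.1 s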
-
-
-def Convergence(data2,msg,OPF,nb_fix,cmd_Path):
-
- LS=[]
- FS=[]
- MoyTension=[]
- MoyTransit=[]
- MoyCumuLS=[]
- MoyCumuFS=[]
- NbeTension=[]
- NbeTransit=[]
- Ind1=[]
- Ind2=[]
- ind=1
- t=0
- p=subprocess.Popen(['python',cmd_Path],stdout=subprocess.PIPE) #launch subprocess
- nbsr=NonBlockingStreamReader(p.stdout) #monitor subprocess stdout
-
- print 'Calculating convergence criteria\n'
- while(ind):
-
- output=nbsr.readline(0.1)
- if output:
- print 'Interrupting simulation.....'
- break
-
- for i in range(multiprocessing.cpu_count()*3): #put 'ok' in the queue three times the number of cores
- msg.put('ok')
-
- debut=data2.get(block=True)
- t+=1
- print 'Package '+str(t)
-
- if (OPF): #if criteria on Load shed and mvar
- LS.append(debut[0])
- FS.append(debut[1])
-
- MoyCumuLS.append(np.mean(LS[0:t]))
- MoyCumuFS.append(np.mean(FS[0:t]))
-
- if t==1:
- indice1=1
- indice2=1
- else:
- indice1=np.std(MoyCumuLS) #calculate stop criterion for load shedding
- indice2=np.std(MoyCumuFS) #calculate stop criterion for mvar
-
- Ind1.append(indice1)
- Ind2.append(indice2)
- print 'indicator Load Shedding= '+str(indice1)+';'+' indicator Added Mvar= '+str(indice2)+'\n'
-
- if (indice1<0.2) and (indice2<0.015) and nb_fix==0:
- ind=0
- break
- elif len(Ind1)==nb_fix:
- break
- else:
- NbeTransit.append(debut[0])
- NbeTension.append(debut[1])
-
- MoyTension.append(np.mean(NbeTension[0:len(NbeTension)]))
- MoyTransit.append(np.mean(NbeTransit[0:len(NbeTransit)]))
-
- if t==1:
- indice1=1
- indice2=1
- else:
- indice1=np.std(MoyTension) #calculate stop criterion for voltage
- indice2=np.std(MoyTransit) #calculate stop criterion for branch flow
-
- Ind1.append(indice1)
- Ind2.append(indice2)
- print 'indicator Nbe Tension= '+str(indice1)+' indicator Transit= '+str(indice2)+'\n'
-
- if (indice1<0.01) and (indice2<0.01) and nb_fix==0:
- ind=0
- break
- elif len(Ind1)==nb_fix:
- break
-
- while msg.empty()==False : #flush the queue
- msg.get()
- # print(msg.qsize())
- for i in range(100): #put a lot of 'stop's in the queue to make all processes stop
- msg.put_nowait('stop')
- # print(msg.qsize())
-
- p.terminate()
-
- return Ind1,Ind2
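-# (descriptive note on the stop rule above) after each package t, the running mean of the
-# per-package indicators is appended (MoyCumuLS/MoyCumuFS, or MoyTension/MoyTransit), and
-# the standard deviation of that sequence of cumulative means is used as the convergence
-# indicator; sampling stops once both indicators fall below their thresholds (0.2 and 0.015
-# for the OPF criteria, 0.01 and 0.01 otherwise) with nb_fix==0, or after nb_fix packages.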
+++ /dev/null
-# -*- coding: utf-8 -*-
-"""
-Created on Mon Jun 03 15:31:42 2013
-
-@author: B31272
-
-Support functions
-"""
-import os,sys,random,string,time,pickle
-import PSENconfig
-sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])
-os.environ['PATH'] += ';' + os.path.dirname(os.path.dirname(PSENconfig.Dico['DIRECTORY']['PF_path'])) + ';'
-#sys.path.append(PSENconfig.Dico['DIRECTORY']['PF_path'])
-#os.environ['PATH'] = PSENconfig.Dico['DIRECTORY']['PF_path'] + ";"+ os.environ['PATH']
-import powerfactory
-
-
-import numpy as np
-from math import *
-from decimal import *
-from openturns import *
-from time import sleep, strftime, gmtime
-import multiprocessing
-from threading import Thread
-from queue import Queue, Empty
-import pdb
-#===============================================================================
-# DEFINITION DES FONCTIONS - CREATION OF THE FUNCTIONS
-#===============================================================================
-
-
-#to remove a list from a string: "['wind 1', 'wind 2', 'charge']" --> ['wind 1', 'wind 2', 'charge']
-def RemoveListfromString(List):
- List = List.replace("]","")
- List = List.replace("[","")
- List = List.replace(")","")
- List = List.replace("(","")
- List = List.replace("'","")
- List = List.replace('"',"")
- List = List.replace(" ","")
- List = List.split(",")
- return List
-
-def RemoveTuplesfromString(TList):
- TL = RemoveListfromString(TList)
- nTL = []
- for i in range(len(TL)/2):
- nTL.append([TL[2*i],float(TL[2*i+1])])
- return nTL
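-# Example (illustrative): RemoveTuplesfromString("[('a', 0.2), ('b', 0.8)]")
-# --> [['a', 0.2], ['b', 0.8]]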
-
-def applyTF(x_in, TF):
-
- X = []
- P = []
- for (x,p) in TF:
- X.append(x)
- P.append(p)
-
-
- Pmax=max(P)
- precision = 0.001
- #calculate precision of values entered
- for i in range(len(X)):
- d1 = Decimal(str(X[i]))
- d2 = Decimal(str(P[i]))
- d1expo = d1.as_tuple().exponent
- d2expo = d2.as_tuple().exponent
- expo = np.minimum(d1expo,d2expo)
- precision = min(10**(expo-1),precision)
-
-
- #change to array type for consistency
- X = np.array(X)
- P = np.array(P)
-
- #interpolate between values so that precise wind speed data doesn't output heavily discretized power levels
- from scipy import interpolate
- finterp = interpolate.interp1d(X,P, kind='linear')
- Xmin = min(X)
- Xmax = max(X)
- Xnew = np.arange(Xmin,Xmax,precision)
- Pnew = finterp(Xnew)
-
- #calculate power by applying transfer function
- if x_in >= Xmax-precision:
- index = len(Pnew)-1
- elif x_in <= Xmin + precision:
- index = 0
- else:
- index = int(round((x_in-Xmin)/precision))
- Power = Pnew[index]
-
- PowerNorm = Power/Pmax #normalize
-
- return PowerNorm
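-# Usage sketch (hypothetical values): for a transfer function given as (x, power) pairs,
-# e.g. TF = [(0.0, 0.0), (10.0, 500.0), (25.0, 1000.0)], applyTF(12.3, TF) returns the
-# linearly interpolated power at x = 12.3 divided by Pmax (1000.0 here), i.e. normalized to [0, 1].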
-
-def eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss = 5):
-
- '''
-
- Reconstitute wind production from wind speed histories for a single site.
-
- syntax:
- ACPower = eol(WS, z_WS, pathWT, HH, alpha=1./7, PercentLoss=5)
-
- inputs:
- WS: numpy array of wind speed measurements to be converted to production values
- z_WS: height, in meters above ground level, of the wind speed measurements
- pathWT: location of selected wind turbine technology's power curve file in computer file system
- HH: wind turbine hub height
- alpha (optional, default = 1/7): exponential factor describing the vertical wind profile; used to extrapolate
- wind data to hub height. Can be scalar or vector with same length as wind data.
- PercentLoss (optional, default = 5): percent loss due to multiple effects: the wake effect of adjacent wind turbines,
- cable resistance between wind turbine/farm and injection point, grid and turbine unavailability, extreme weather conditions, etc.
-
- outputs:
- ACPower: numpy array of normalized expected wind production for the given wind farm.
-
- '''
-
-
- #open and treat wind turbine data in .pow file
- f = open(pathWT)
- lines = f.readlines()
- WTdata = {}
- WTdata["model"] = lines[0][1:-2]
- WTdata['diameter'] = float(lines[1][1:-2])
- WTdata['CutInWindSpeed'] = float(lines[4][1:-2])
- WTdata['CutOutWindSpeed'] = float(lines[3][1:-2])
- WTdata['PowerCurve'] = {}
- WTdata['PowerCurve']['WindSpeed'] = np.arange(0, 31)
- WTdata['PowerCurve']['Power'] = [float(0)] #in kW
- for i in range(5,35):
- WTdata['PowerCurve']['Power'].append(float(lines[i][1:-2]))
-
- WTdata['Pmax']=max(WTdata['PowerCurve']['Power'])
-
- #insert WT hub height
- WTdata['z'] = HH
-
- #correct wind speed values for appropriate height
- WS_hh = WS*(WTdata['z']/z_WS)**alpha #wind speed at hub height
-
- #calculate precision of cut in and cut out windspeeds
- d1 = Decimal(str(WTdata['CutInWindSpeed']))
- d2 = Decimal(str(WTdata['CutOutWindSpeed']))
- expo = np.minimum(d1.as_tuple().exponent, d2.as_tuple().exponent)
- precision = 10**(expo-1)
-
- #insert points for cut-in and cut-out wind speeds
- add_ci = 0
- add_co= 0
- if np.mod(WTdata['CutInWindSpeed'],1)==0:
- add_ci = precision
- if np.mod(WTdata['CutOutWindSpeed'],1)==0:
- add_co = precision
- i_cutin = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutInWindSpeed']+add_ci))[0][0]
- i_cutout = np.where(WTdata['PowerCurve']['WindSpeed']>(WTdata['CutOutWindSpeed']+add_co))[0][0] + 1 #+1 to account for addition of cut in point
- WTdata['PowerCurve']['WindSpeed'] = list(WTdata['PowerCurve']['WindSpeed'])
- WTdata['PowerCurve']['WindSpeed'].insert(i_cutin, WTdata['CutInWindSpeed']+add_ci)
- WTdata['PowerCurve']['WindSpeed'].insert(i_cutout, WTdata['CutOutWindSpeed']+add_co)
- WTdata['PowerCurve']['Power'].insert(i_cutin, 0)
- WTdata['PowerCurve']['Power'].insert(i_cutout, 0)
-
- #change to array type for consistency
- WTdata['PowerCurve']['WindSpeed'] = np.array(WTdata['PowerCurve']['WindSpeed'])
- WTdata['PowerCurve']['Power'] = np.array(WTdata['PowerCurve']['Power'])
-
- #interpolate between values so that precise wind speed data doesn't output heavily discretized power levels
- from scipy import interpolate
- finterp = interpolate.interp1d(WTdata['PowerCurve']['WindSpeed'],WTdata['PowerCurve']['Power'], kind='linear')
- Vnew = np.arange(0,30,precision)
- Pnew = finterp(Vnew)
-
- #calculate power produced by turbine in function of wind speed
- Pac_turbine = []
- for i, ws in enumerate(WS_hh):
- if ws >= 30-precision:
- index = len(Pnew)-1
- else:
- index = int(round(ws/precision)) #index of correct wind speed
- Pac_turbine.append(Pnew[index]) #Power corresponds to value with same index as wind speed vector
- Pac_turbine = np.array(Pac_turbine)
-
- #account for Losses...currently a single loss factor but could imagine implementing a per-point method
- #WakeEffects = 4 #3-8% for a typical farm, 0% for an individual windmill
- #CableResistanceLosses = 1 #1-3% between windmills and electric counter, depending on voltage levels and cable length
- #GridUnavalability = 1
- #WTUnavailability = 3
- #AcousticCurtailment = 1-4
- #Freezing = 0.5
- #LF = (1-WakeEffects/100)*(1-CableResistanceLosses/100) #loss factor
- ACPower = Pac_turbine*(1-PercentLoss/100) #total AC power produced by wind turbine
- ACPowerNorm = ACPower/WTdata['Pmax']
- return ACPowerNorm
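-# Usage sketch (hypothetical file name and values): convert a 10 m wind speed series to
-# normalized production for a turbine with an 80 m hub height:
-#   WS = np.array([3.2, 7.5, 11.0])
-#   ACPowerNorm = eol(WS, 10.0, 'turbine.pow', 80.0)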
-
-#Function to read the data of interest and put it into a matrix
-def read_pfd(app,doc,recal=0):
- ########################################################
- # purpose of this function: retrieve the network parameters
- ########################################################
- # if recal==1, recalculate the load flow
- prj = app.GetActiveProject()
- studycase=app.GetActiveStudyCase()
- grids=studycase.GetChildren(1,'*.ElmNet',1)[0].contents
- if recal == 1:#compute the load flow
- ldf = app.GetFromStudyCase('ComLdf')
- ldf.Execute() #run
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents('*.ElmTerm', 1))
- bus = []
- for noeud in tous:
- bus.append(noeud)
- noeuds = sorted(bus, key=lambda x: x.cStatName)
- buses = []
-
- for ii in range(len(noeuds)):
- if noeuds[ii].HasResults():
- mu = noeuds[ii].GetAttribute('m:u')
- mphiu = noeuds[ii].GetAttribute('m:phiu')
- else :
- mu = 0
- mphiu = 0
- busname = noeuds[ii].cStatName.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- aa = [ii, noeuds[ii].uknom, mu, busname, noeuds[ii].vmin,
- noeuds[ii].vmax, noeuds[ii].GetBusType(), mphiu,noeuds[ii]]
- # [number, nominal kV, magnitude p.u., busname, Vmin, Vmax, type, angle deg, obj]
- buses.append(aa)
- ##===================== Lines =====================
- # lignes = app.GetCalcRelevantObjects('*.ElmLne', 0)
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmLne', 1))
- lines=[]
- for line in tous:
- frombus_name=line.bus1.cBusBar.cStatName
- frombus_name = frombus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if frombus_name in buses[ii]:
- frombus_number=ii
- break
- tobus_name=line.bus2.cBusBar.cStatName
- tobus_name = tobus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if tobus_name in buses[ii]:
- tobus_number=ii
- break
-
- outs = line.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = line.outserv
- else:
- outserv = line.outserv
- if outserv==1:
- currentA = 0
- pourcent = 0
- flowP = 0
- flowQ = 0
- else:
- currentA=line.GetAttribute('m:I:bus1') #line current in A
- pourcent=line.GetAttribute('c:loading') # line loading in %
- flowP=line.GetAttribute('m:P:bus1')
- flowQ = line.GetAttribute('m:Q:bus1')
-
- idline=line.loc_name#line.nlnum
- aa=[frombus_number,tobus_number,currentA,pourcent,pourcent,pourcent,flowP,flowQ,frombus_name,tobus_name,idline,line]
- lines.append(aa)
-
- # 2 windings transformers data (from, to, amps, rate%a, ploss, qloss)============== 2-winding transformers
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmTr2', 1))
- transf=[]
- for trans in tous:
- frombus_name=trans.bushv.cBusBar.cStatName
- frombus_name = frombus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if frombus_name in buses[ii]:
- frombus_number=ii
- break
- tobus_name=trans.buslv.cBusBar.cStatName
- tobus_name = tobus_name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if tobus_name in buses[ii]:
- tobus_number=ii
- break
-
- outs = trans.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = trans.outserv
- else:
- outserv = trans.outserv
-
- if trans.outserv == 1 or outserv==1:
- currentA = 0
- pourcent = 0
- flowP = 0
- flowQ = 0
- else:
- currentA=trans.GetAttribute('m:I:bushv') #current in A at the HV bus
- pourcent=trans.GetAttribute('c:loading') # loading in %
- flowP=trans.GetAttribute('m:P:bushv')
- flowQ = trans.GetAttribute('m:Q:bushv')
- # idtr=trans.ntnum
- idtr = trans.loc_name
- aa=[frombus_number,tobus_number,currentA,pourcent,pourcent,pourcent,flowP,flowQ,frombus_name,tobus_name,idtr,trans]
- transf.append(aa)
- #3 windings transformers data (from, to, amps, rate%a, ploss, qloss)============== 3-winding transformers
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmTr3', 1))
- transf3 = []
- for trans in tous:
- wind1name = trans.bushv.cBusBar.cStatName
- wind1name = wind1name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if wind1name in buses[ii]:
- wind1number = ii
- break
- wind2name = trans.busmv.cBusBar.cStatName
- wind2name = wind2name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if wind2name in buses[ii]:
- wind2number = ii
- break
- wind3name = trans.buslv.cBusBar.cStatName
- wind3name = wind3name.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if wind3name in buses[ii]:
- wind3number = ii
- break
- outs = trans.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = trans.outserv
- else:
- outserv = trans.outserv
- if trans.outserv == 1 or outserv==1:
- currentHV = 0
- currentMV = 0
- currentLV = 0
- pourcenthv = 0
- pourcentmv = 0
- pourcentlv = 0
- flowPh = 0
- flowPm = 0
- flowPl = 0
- flowQh = 0
- flowQm = 0
- flowQl = 0
- else:
- currentHV = trans.GetAttribute('m:I:bushv') # current in A at the HV bus
- currentMV = trans.GetAttribute('m:I:busmv') # current in A at the MV bus
- currentLV = trans.GetAttribute('m:I:buslv') # current in A at the LV bus
- pourcenthv = trans.GetAttribute('c:loading_h') # HV loading in %
- pourcentmv = trans.GetAttribute('c:loading_m') # MV loading in %
- pourcentlv = trans.GetAttribute('c:loading_l') # LV loading in %
- flowPh = trans.GetAttribute('m:P:bushv')
- flowPm = trans.GetAttribute('m:P:busmv')
- flowPl = trans.GetAttribute('m:P:buslv')
- flowQh = trans.GetAttribute('m:Q:bushv')
- flowQm = trans.GetAttribute('m:Q:busmv')
- flowQl = trans.GetAttribute('m:Q:buslv')
- # idtr3 = trans.nt3nm
- idtr3 = trans.loc_name
- aa = [wind1number, wind2number,wind3number,1, currentHV, pourcenthv, pourcenthv, pourcenthv, flowPh, flowQh, wind1name,wind2name,wind3name,idtr3,trans]
- transf3.append(aa)
- aa = [wind1number, wind2number, wind3number, 2, currentMV, pourcentmv, pourcentmv, pourcentmv, flowPm, flowQm,
- wind1name, wind2name, wind3name, idtr3, trans]
- transf3.append(aa)
- aa = [wind1number, wind2number, wind3number, 3, currentLV, pourcentlv, pourcentlv, pourcentlv, flowPl, flowQl,
- wind1name, wind2name, wind3name, idtr3, trans]
- transf3.append(aa)
-
- #Machines data (bus, inservice, id, pgen, qgen, mvabase, pmax, qmax, name)==============Generator
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
- plants = []
- for plant in tous:
- if plant.i_mot==0:
- busname=plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name#plant.ngnum
- outs=plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
- if plant.outserv == 1 or outserv ==1 :
- pgen = 0
- qgen = 0
- else:
- pgen = plant.GetAttribute('m:P:bus1')
- qgen = plant.GetAttribute('m:Q:bus1')
- sn = plant.GetAttribute('t:sgn')
- pmax = plant.Pmax_uc
- # pmax = plant.P_max
- pmin = plant.Pmin_uc
- qmax = plant.cQ_max
- qmin = plant.cQ_min
- typ = 'ElmSym'
- aa=[busnumber,plant.outserv,idplant,pgen,qgen,sn,pmax,pmin,busname,pmin,qmin,plant, typ]
- plants.append(aa)
- ## __________________ Asynchrone ___________________________
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
- for plant in tous:
- if plant.i_mot==0:
- busname=plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name#plant.ngnum
- outs = plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
- if plant.outserv == 1 or outserv==1:
- pgen=0
- qgen = 0
- else:
- pgen = plant.GetAttribute('m:P:bus1')
- qgen = plant.GetAttribute('m:Q:bus1')
- sn = plant.GetAttribute('t:sgn')
- pmax = plant.Pmax_uc
- # pmax = plant.P_max
- pmin = plant.Pmin_uc
- qmax = plant.cQ_max
- qmin = plant.cQ_min
- typ = 'ElmAsm'
- aa=[busnumber, plant.outserv,idplant,pgen,qgen,sn,pmax,pmin,busname,pmin,qmin,plant,typ]
- plants.append(aa)
- ## _______________GenStat ________________
- tous = []
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmGenstat', 1))
- for plant in tous:
- busname = plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name # plant.ngnum
- outs = plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
- if plant.outserv == 1 or outserv == 1:
- pgen = 0
- qgen = 0
- else:
- pgen = plant.GetAttribute('m:P:bus1')
- qgen = plant.GetAttribute('m:Q:bus1')
- sn = plant.GetAttribute('e:sgn')
- pmax = plant.Pmax_uc
- # pmax = plant.P_max
- pmin = plant.Pmin_uc
- qmax = plant.cQ_max
- qmin = plant.cQ_min
- typ = 'ElmGenstat'
- aa = [busnumber, plant.outserv, idplant, pgen, qgen, sn, pmax, pmin, busname, pmin, qmin,plant, typ]
- plants.append(aa)
- ## ____________________ ElmPvsys ______________________________
- tous = []
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmPvsys', 1))
- for plant in tous:
- busname = plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name # plant.ngnum
- outs = plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
- if plant.outserv == 1 or outserv == 1:
- pgen = 0
- qgen = 0
- else:
- pgen = plant.GetAttribute('m:P:bus1')
- qgen = plant.GetAttribute('m:Q:bus1')
- sn = plant.GetAttribute('e:sgn')
- pmax = plant.Pmax_uc
- # pmax = plant.P_max
- pmin = plant.Pmin_uc
- qmax = plant.cQ_max
- qmin = plant.cQ_min
- typ = 'ElmPvsys'
- aa = [busnumber, plant.outserv, idplant, pgen, qgen, sn, pmax, pmin, busname, pmin, qmin,plant, typ]
- plants.append(aa)
- # Motors data (bus, active, reactive, status, name, id)===================== Motor
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
- motors = []
- for motor in tous:
- if motor.i_mot == 1:
- busname = motor.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = motor.loc_name#motor.ngnum
- outs = motor.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = motor.outserv
- else:
- outserv = motor.outserv
- if motor.outserv == 1 or outserv == 1:
- pgen = 0
- qgen = 0
- else:
- pgen = motor.GetAttribute('m:P:bus1')
- qgen = motor.GetAttribute('m:Q:bus1')
- aa = [busnumber, pgen, qgen, motor.outserv, busname,idplant,motor]
- motors.append(aa)
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
- for motor in tous:
- if motor.i_mot == 1:
- busname = motor.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = motor.loc_name#motor.ngnum
- outs = motor.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = motor.outserv
- else:
- outserv = motor.outserv
- # outserv = motor.outserv
- if outserv == 1:
- pgen = 0
- qgen = 0
- else:
- pgen = motor.GetAttribute('m:P:bus1')
- qgen = motor.GetAttribute('m:Q:bus1')
- aa = [busnumber, pgen, qgen, motor.outserv, busname,idplant,motor]
- motors.append(aa)
-
- # Loads data (bus, active, reactive, status, name, id)===================== Load
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmLod', 1))
- tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
- loads = []
- for bus in buses:
- idload = 0
- for load in tous:
- busname = load.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- if busname == bus[3]:
- idload += 1# create an id for the load
- busnumber = bus[0]
- # outserv = load.outserv
- outs = load.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = load.outserv
- else:
- outserv = load.outserv
- if load.outserv == 1 or outserv == 1:
- # if outserv == 1:
- pload = 0.0
- qload = 0.0
- else:
- pload = load.GetAttribute('m:P:bus1')
- qload = load.GetAttribute('m:Q:bus1') # qlini_a
- aa = [busnumber, pload, qload, load.outserv, busname, idload,load]
- loads.append(aa)
-
- #Fixed shunt data (number, MVAR, name, ...)========================== Fixed Shunt
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmShnt', 1))
- tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
- shunt = []
- for bus in buses:
- idshunt = 0
- for shunt1 in tous:
- if shunt1.ncapx==1:# number of steps = 1: treated as a fixed shunt, for equivalence with the old PSSE-based program
- busname = shunt1.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- if busname == bus[3]:
- idshunt += 1 # create an id for the shunt
- busnumber = bus[0]
- qnom=shunt1.Qmax
- outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = shunt1.outserv
- else:
- outserv = shunt1.outserv
- if outserv == 1:
- qshunt = 0
- else:
- qshunt = shunt1.GetAttribute('m:Q:bus1') # qlini_a
- aa = [busnumber, outserv, qshunt, busname,qnom, idshunt,bus,shunt1]
- shunt.append(aa)
- # Switched shunt data (number, status,MVAR, name,Qnom,id)================Switched Shunt
- swshunt = []
- for bus in buses:
- idshunt = 0
- for shunt1 in tous:
- if shunt1.ncapx != 1: # number of steps != 1: treated as a switched shunt, for equivalence with the old PSSE-based program
- busname = shunt1.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- if busname == bus[3]:
- idshunt += 1 # create an id for the shunt
- busnumber = bus[0]
- qnom = shunt1.Qmax
- outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = shunt1.outserv
- else:
- outserv = shunt1.outserv
- if outserv == 1:
- qshunt = 0
- else:
- qshunt = shunt1.GetAttribute('m:Q:bus1') # qlini_a
- aa = [busnumber, outserv, qshunt, busname, qnom, idshunt,shunt1]
- swshunt.append(aa)
-
- return buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt
-
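-# Sketch of a typical call (assuming 'app' is the powerfactory.GetApplication()
-# handle with a project and study case already activated):
-#   buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt = read_pfd(app, doc, recal=1)
-#   # buses[i] = [number, nominal kV, |V| p.u., name, Vmin, Vmax, type, angle deg, object]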
-def read_pfd_simple(app,doc):
- ########################################################
- # purpose of this function: collect the network parameters
- ########################################################
- # if recal==1, recompute the load flow (disabled in this variant)
- prj = app.GetActiveProject()
- studycase=app.GetActiveStudyCase()
- grids=studycase.GetChildren(1,'*.ElmNet',1)[0].contents
- # if recal == 1:#compute the load flow
- # ldf = app.GetFromStudyCase('ComLdf')
- # ldf.Execute() #run
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmTerm', 1))
- bus = []
- for noeud in tous:
- bus.append(noeud)
- noeuds = sorted(bus, key=lambda x: x.cStatName)
- buses = []
- for ii in range(len(noeuds)):
- busname = noeuds[ii].cStatName.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- aa = [ii, noeuds[ii].uknom, 1, busname, noeuds[ii].vmin,
- noeuds[ii].vmax, noeuds[ii].GetBusType(), 0,noeuds[ii]]
- # [number, nominal kV, magnitude p.u., busname, Vmin, Vmax, type, angle deg, obj]
- buses.append(aa)
-
- #Machines data (bus, inservice, id, pgen, qgen, mvabase, pmax, qmax, name)==============Generator
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
- plants = []
- for plant in tous:
- if plant.i_mot==0:
- busname=plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name#plant.ngnum
- outs=plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
-
- aa=[busnumber,outserv,idplant,0,0,0,0,0,busname,0,0,plant]
- plants.append(aa)
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
- for plant in tous:
- if plant.i_mot==0:
- busname=plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name#plant.ngnum
- outs = plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
- aa=[busnumber,outserv,idplant,0,0,0,0,0,busname,0,0,plant]
- plants.append(aa)
- tous = []
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmGenstat', 1))
- for plant in tous:
- busname = plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name # plant.ngnum
- outs = plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
- pgini = plant.pgini
- pgini_a = plant.pgini_a
-
- aa = [busnumber, outserv, idplant, 0, 0, 0, 0, 0, busname, 0, 0,plant, pgini, pgini_a]
- plants.append(aa)
- tous = []
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmPvsys', 1))
- for plant in tous:
- busname = plant.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- for ii in range(len(buses)):
- if busname in buses[ii]:
- busnumber = ii
- break
- idplant = plant.loc_name # plant.ngnum
- outs = plant.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv==0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = plant.outserv
- else:
- outserv = plant.outserv
-
- aa = [busnumber, outserv, idplant, 0, 0, 0, 0, 0, busname, 0, 0,plant]
- plants.append(aa)
-
- # Loads data (bus, active, reactive, status, name, id)===================== Load
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmLod', 1))
- tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
- loads = []
- for bus in buses:
- idload = 0
- for load in tous:
- busname = load.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- if busname == bus[3]:
- idload += 1# create an id for the load
- busnumber = bus[0]
- # outserv = load.outserv
- outs = load.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = load.outserv
- else:
- outserv = load.outserv
- if outserv == 1:
- pload = 0
- qload = 0
- else:
- pload = load.plini_a
- qload = load.qlini_a
- aa = [busnumber, pload, qload, outserv, busname, idload,load]
- loads.append(aa)
-
- #Fixed shunt data (number, MVAR, name, ...)========================== Fixed Shunt
- tous=[]
- for grid in grids:
- tous.extend(grid.obj_id.GetContents( '*.ElmShnt', 1))
- tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
- shunt = []
- for bus in buses:
- idshunt = 0
- for shunt1 in tous:
- if shunt1.ncapx==1:# number of steps = 1: treated as a fixed shunt, for equivalence with the old PSSE-based program
- busname = shunt1.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- if busname == bus[3]:
- idshunt += 1 # create an id for the shunt
- busnumber = bus[0]
- qnom=shunt1.Qmax
- outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = shunt1.outserv
- else:
- outserv = shunt1.outserv
- if outserv == 1:
- qshunt = 0
- else:
- qshunt = shunt1.Qact
- aa = [busnumber, outserv, qshunt, busname,qnom, idshunt,bus,shunt1]
- shunt.append(aa)
- # Switched shunt data (number, status,MVAR, name,Qnom,id)================Switched Shunt
- swshunt = []
- for bus in buses:
- idshunt = 0
- for shunt1 in tous:
- if shunt1.ncapx != 1: # number of steps != 1: treated as a switched shunt, for equivalence with the old PSSE-based program
- busname = shunt1.bus1.cBusBar.cStatName
- busname = busname.replace('/','_').replace(')','_').replace('(','_').replace(" ","_").replace("-","_").replace(".","_").replace("&","and").replace("%","pct").replace("=","eq").replace("#","_").replace("$","_")
- if busname == bus[3]:
- idshunt += 1 # create an id for the shunt
- busnumber = bus[0]
- qnom = shunt1.Qmax
- outs = shunt1.GetChildren(1, 'outserv.Charef', 1)
- if outs:
- if outs[0].outserv == 0:
- outserv = outs[0].typ_id.curval
- else:
- outserv = shunt1.outserv
- else:
- outserv = shunt1.outserv
- if outserv == 1:
- qshunt = 0
- else:
- qshunt = shunt1.Qact
- aa = [busnumber, outserv, qshunt, busname, qnom, idshunt,shunt1]
- swshunt.append(aa)
-
- return plants, loads, shunt, swshunt
-
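-# read_pfd_simple() is the pre-load-flow variant of read_pfd(): it returns only
-# (plants, loads, shunt, swshunt), filled with set-point values (plini_a,
-# qlini_a, Qact) instead of load-flow results:
-#   plants, loads, shunt, swshunt = read_pfd_simple(app, doc)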
-
-
-#def read_change(app,scn,settriger_iter):
-#######################################################################BEGIN RETRIEVAL
-# prj = app.GetActiveProject()
-# #if several grids are activated
-# studycase=app.GetActiveStudyCase()
-# grids=studycase.GetChildren(1,'*.ElmNet',1)[0].contents
-#
-# tous=[]
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmTerm', 1))
-# bus = []
-# for noeud in tous:
-# bus.append(noeud)
-# noeuds = sorted(bus, key=lambda x: x.cStatName)
-# buses = []
-# for ii in range(len(noeuds)):
-# aa = [ii, noeuds[ii].uknom, noeuds[ii].GetAttribute('m:u'), noeuds[ii].cStatName, noeuds[ii].vmin,
-# noeuds[ii].vmax, noeuds[ii].GetBusType(), noeuds[ii].GetAttribute('m:phiu'),noeuds[ii]]
-# # [number, nominal kV, magnitude p.u., busname, Vmin, Vmax, type, angle deg, obj]
-# buses.append(aa)
-# # ##===================== Lines =====================
-# # # lignes = app.GetCalcRelevantObjects('*.ElmLne', 0)
-# # tous=[]
-# # for grid in grids:
-# # tous.extend(grid.obj_id.GetContents( '*.ElmLne', 1))
-# # lines=[]
-# # for line in tous:
-# # frombus_name=line.bus1.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if frombus_name in buses[ii]:
-# # frombus_number=ii
-# # break
-# # tobus_name=line.bus2.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if tobus_name in buses[ii]:
-# # tobus_number=ii
-# # break
-# # currentA=line.GetAttribute('m:I:bus1') #line current in A
-# # pourcent=line.GetAttribute('c:loading') # line loading in %
-# # flowP=line.GetAttribute('m:P:bus1')
-# # flowQ = line.GetAttribute('m:Q:bus1')
-# # idline=line.loc_name#line.nlnum
-# # aa=[frombus_number,tobus_number,currentA,pourcent,pourcent,pourcent,flowP,flowQ,frombus_name,tobus_name,idline,line]
-# # lines.append(aa)
-#
-# # # 2 windings transformers data (from, to, amps, rate%a, ploss, qloss)
-# # tous=[]
-# # for grid in grids:
-# # tous.extend(grid.obj_id.GetContents( '*.ElmTr2', 1))
-# # transf=[]
-# # for trans in tous:
-# # frombus_name=trans.bushv.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if frombus_name in buses[ii]:
-# # frombus_number=ii
-# # break
-# # tobus_name=trans.buslv.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if tobus_name in buses[ii]:
-# # tobus_number=ii
-# # break
-# # currentA=trans.GetAttribute('m:I:bushv') #current in A at the HV bus
-# # pourcent=trans.GetAttribute('c:loading') # loading in %
-# # flowP=trans.GetAttribute('m:P:bushv')
-# # flowQ = trans.GetAttribute('m:Q:bushv')
-# # idline=trans.ntnum
-# # aa=[frombus_number,tobus_number,currentA,pourcent,pourcent,pourcent,flowP,flowQ,frombus_name,tobus_name,idline,trans]
-# # transf.append(aa)
-# # #3 windings transformers data (from, to, amps, rate%a, ploss, qloss)
-# # tous=[]
-# # for grid in grids:
-# # tous.extend(grid.obj_id.GetContents( '*.ElmTr3', 1))
-# # transf3 = []
-# # for trans in tous:
-# # wind1name = trans.bushv.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if wind1name in buses[ii]:
-# # wind1number = ii
-# # break
-# # wind2name = trans.busmv.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if wind2name in buses[ii]:
-# # wind2number = ii
-# # break
-# # wind3name = trans.buslv.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if wind3name in buses[ii]:
-# # wind3number = ii
-# # break
-# # if trans.outserv==1:
-# # currentA = 0 # current in A at the HV bus
-# # pourcent = 0 # loading in %
-# # flowP =0
-# # flowQ = 0
-# # else:
-# # currentA = trans.GetAttribute('m:I:bushv') # current in A at the HV bus
-# # pourcent = trans.GetAttribute('c:loading') # loading in %
-# # flowP = trans.GetAttribute('m:P:bushv')
-# # flowQ = trans.GetAttribute('m:Q:bushv')
-# # idline = trans.nt3nm
-# # aa = [wind1number, wind2number,wind3number,3, currentA, pourcent, pourcent, pourcent, flowP, flowQ, wind1name,wind2name,wind3name,idline,trans]
-# # transf3.append(aa)
-#
-# #Machines data (bus, inservice, id, pgen, qgen, mvabase, pmax, qmax, name)==============Generator
-# tous=[]
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
-# plants = []
-# for plant in tous:
-# if plant.i_mot==0:
-# busname=plant.bus1.cBusBar.cStatName
-# for ii in range(len(buses)):
-# if busname in buses[ii]:
-# busnumber = ii
-# break
-# idplant = plant.loc_name#plant.ngnum
-# outserv = plant.outserv
-# if outserv == 1:
-# pgen = 0
-# qgen = 0
-# else:
-# pgen = plant.GetAttribute('m:P:bus1')
-# qgen = plant.GetAttribute('m:Q:bus1')
-# sn = plant.GetAttribute('t:sgn')
-# pmax = plant.Pmax_uc
-# pmin = plant.Pmin_uc
-# qmax = plant.cQ_max
-# qmin = plant.cQ_min
-# aa=[busnumber,outserv,idplant,pgen,qgen,sn,pmax,pmin,busname,pmin,qmin,plant]
-# plants.append(aa)
-# tous=[]
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
-# for plant in tous:
-# if plant.i_mot==0:
-# busname=plant.bus1.cBusBar.cStatName
-# for ii in range(len(buses)):
-# if busname in buses[ii]:
-# busnumber = ii
-# break
-# idplant = plant.loc_name#plant.ngnum
-# outserv=plant.outserv
-# if outserv==1:
-# pgen=0
-# qgen = 0
-# else:
-# pgen = plant.GetAttribute('m:P:bus1')
-# qgen = plant.GetAttribute('m:Q:bus1')
-# sn = plant.GetAttribute('t:sgn')
-# pmax = plant.Pmax_uc
-# pmin = plant.Pmin_uc
-# qmax = plant.cQ_max
-# qmin = plant.cQ_min
-# aa=[busnumber,outserv,idplant,pgen,qgen,sn,pmax,pmin,busname,pmin,qmin,plant]
-# plants.append(aa)
-# # tous = []
-# # for grid in grids:
-# # tous.extend(grid.obj_id.GetContents( '*.ElmGenstat', 1))
-# # for plant in tous:
-# # busname = plant.bus1.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if busname in buses[ii]:
-# # busnumber = ii
-# # break
-# # idplant = plant.loc_name # plant.ngnum
-# # outserv = plant.outserv
-# # if outserv == 1:
-# # pgen = 0
-# # qgen = 0
-# # else:
-# # pgen = plant.GetAttribute('m:P:bus1')
-# # qgen = plant.GetAttribute('m:Q:bus1')
-# # sn = plant.GetAttribute('e:sgn')
-# # pmax = plant.Pmax_uc
-# # pmin = plant.Pmin_uc
-# # qmax = plant.cQ_max
-# # qmin = plant.cQ_min
-# # aa = [busnumber, outserv, idplant, pgen, qgen, sn, pmax, pmin, busname, pmin, qmin,plant]
-# # plants.append(aa)
-# # tous = []
-# # for grid in grids:
-# # tous.extend(grid.obj_id.GetContents( '*.ElmPvsys', 1))
-# # for plant in tous:
-# # busname = plant.bus1.cBusBar.cStatName
-# # for ii in range(len(buses)):
-# # if busname in buses[ii]:
-# # busnumber = ii
-# # break
-# # idplant = plant.loc_name # plant.ngnum
-# # outserv = plant.outserv
-# # if outserv == 1:
-# # pgen = 0
-# # qgen = 0
-# # else:
-# # pgen = plant.GetAttribute('m:P:bus1')
-# # qgen = plant.GetAttribute('m:Q:bus1')
-# # sn = plant.GetAttribute('e:sgn')
-# # pmax = plant.Pmax_uc
-# # pmin = plant.Pmin_uc
-# # qmax = plant.cQ_max
-# # qmin = plant.cQ_min
-# # aa = [busnumber, outserv, idplant, pgen, qgen, sn, pmax, pmin, busname, pmin, qmin,plant]
-# # plants.append(aa)
-# tous=[]
-# # Motors data (bus, active, reactive, status, name, id)===================== Motor
-#
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmSym', 1))
-# motors = []
-# for motor in tous:
-# if motor.i_mot == 1:
-# busname = motor.bus1.cBusBar.cStatName
-# for ii in range(len(buses)):
-# if busname in buses[ii]:
-# busnumber = ii
-# break
-# idplant = motor.loc_name#motor.ngnum
-# outserv = motor.outserv
-# if outserv == 1:
-# pgen = 0
-# qgen = 0
-# else:
-# pgen = motor.GetAttribute('m:P:bus1')
-# qgen = motor.GetAttribute('m:Q:bus1')
-# aa = [busnumber, pgen, qgen, outserv, busname,idplant,motor]
-# motors.append(aa)
-# tous=[]
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmAsm', 1))
-# for motor in tous:
-# if motor.i_mot == 1:
-# busname = motor.bus1.cBusBar.cStatName
-# for ii in range(len(buses)):
-# if busname in buses[ii]:
-# busnumber = ii
-# break
-# idplant = motor.loc_name#motor.ngnum
-# outserv = motor.outserv
-# if outserv == 1:
-# pgen = 0
-# qgen = 0
-# else:
-# pgen = motor.GetAttribute('m:P:bus1')
-# qgen = motor.GetAttribute('m:Q:bus1')
-# aa = [busnumber, pgen, qgen, outserv, busname,idplant]
-# motors.append(aa)
-#
-# # Loads data (bus, active, reactive, status, name, id)===================== Load
-# tous=[]
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmLod', 1))
-# tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
-# loads = []
-# for bus in buses:
-# idload = 0
-# for load in tous:
-# busname = load.bus1.cBusBar.cStatName
-# if busname == bus[3]:
-# idload += 1# create an id for the load
-# busnumber = bus[0]
-# outserv = load.outserv
-# if outserv == 1:
-# pload = 0
-# qload = 0
-# else:
-# pload = load.GetAttribute('m:P:bus1')
-# qload = load.GetAttribute('m:Q:bus1') # qlini_a
-# aa = [busnumber, pload, qload, outserv, busname, idload,load]
-# loads.append(aa)
-# #Fixed shunt data (number, MVAR, name, ...)========================== Fixed Shunt
-# tous=[]
-# for grid in grids:
-# tous.extend(grid.obj_id.GetContents( '*.ElmShnt', 1))
-# tous = sorted(tous, key=lambda x: x.bus1.cBusBar.cStatName)
-# shunt = []
-# for bus in buses:
-# idshunt = 0
-# for shunt1 in tous:
-# if shunt1.ncapx==1:# number of steps = 1: treated as a fixed shunt, for equivalence with the old PSSE-based program
-# busname = shunt1.bus1.cBusBar.cStatName
-# if busname == bus[3]:
-# idshunt += 1 # create an id for the shunt
-# busnumber = bus[0]
-# qnom=shunt1.Qmax
-# outserv = shunt1.outserv
-# if outserv == 1:
-# qshunt = 0
-# else:
-# qshunt = shunt1.GetAttribute('m:Q:bus1') # qlini_a
-# aa = [busnumber, outserv, qshunt, busname,qnom, idshunt,bus,shunt1]
-# shunt.append(aa)
-# # Switched shunt data (number, status,MVAR, name,Qnom,id)================Switched Shunt
-# swshunt = []
-# for bus in buses:
-# idshunt = 0
-# for shunt1 in tous:
-# if shunt1.ncapx != 1: # number of steps != 1: treated as a switched shunt, for equivalence with the old PSSE-based program
-# busname = shunt1.bus1.cBusBar.cStatName
-# if busname == bus[3]:
-# idshunt += 1 # create an id for the shunt
-# busnumber = bus[0]
-# qnom = shunt1.Qmax
-# outserv = shunt1.outserv
-# if outserv == 1:
-# qshunt = 0
-# else:
-# qshunt = shunt1.GetAttribute('m:Q:bus1') # qlini_a
-# aa = [busnumber, outserv, qshunt, busname, qnom, idshunt,shunt1]
-# swshunt.append(aa)
-#
-#######################################################################END RETRIEVAL
-# settriger_iter.outserv = 1
-# app.SaveAsScenario(scn, 1)
-# # app.Show()# app.SaveAsScenario(scn)
-# for plant in plants:
-# plant[11].pgini=plant[3]-0.01
-# # plant[11].qgini = plant[4]
-# for load in loads:
-# load[6].plini = load[1]
-# load[6].qlini = load[2]
-# scenario_temporaire = app.GetActiveScenario()
-# scenario_temporaire.Save()
-#
-#
-# # app.SaveAsScenario(scn,1)
-# aa=1
-#
-# # return buses, lines, transf, plants, loads, shunt, motors, transf3, swshunt
-
-
-
-def MyLogger(x,y,z,logCSVfilename,ite):
- f=open(logCSVfilename, 'a')
- f.write(str(ite)+';')
- f.write(";")
- nx = len(x)
- for i in range(0,nx):
- f.write(str(x[i]))#f.write("%f;" % (x[i]))
- f.write(";")
- f.write(";")
- nz = len(z)
- for i in range(0,nz):
- try:
- f.write("%f;" % (z[i]))
- except Exception: # fall back to str() for non-numeric entries
- f.write(str(z[i])+";")
- f.write(";")
- ny = len(y)
- for j in range(0,ny):
- f.write("%f;" % (y[j]))
- f.write("\n")
- f.close()
-
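-# MyLogger appends one semicolon-separated row per iteration; empty fields mark
-# the boundaries between the iteration number, inputs x, outputs z and y.
-# A hypothetical call and the row it produces:
-#   MyLogger([0.8, 1], [50.2, 49.9], [1, 0.02], 'log.csv', 3)
-#   # -> 3;;0.8;1;;1.000000;0.020000;;50.200000;49.900000;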
-
-# Function to write one CSV output file per type of output quantity
-def MyMultiLogger (x, y, sizeY, z, ite, folder, day, fich, hour):
- global ny
- y0=0
- for fich in range (np.size(sizeY,0)):
- multilogfilename=folder+"/N"+day+"/Y"+str(fich)+"simulationDClog_"+hour+".csv"
- f=open(multilogfilename, 'a')
- f.write("%f;" % (ite))
- f.write(";")
- nx = len(x)
- for i in range(0,nx):
- f.write("%f;" % (x[i]))
- f.write(";")
- nz = len(z)
- for i in range(0,nz):
- f.write("%f;" % (z[i]))
- f.write(";")
- ny = sizeY[fich]
- for j in range(0,ny):
- f.write("%f;" % (y[j+y0]))
- f.write("\n")
- f.close()
- y0 += ny
- print ("Fichiers "+str(ite)+" enregistres\n\n")
-
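-# MyMultiLogger splits the y vector across one CSV per output group: file number
-# 'fich' receives the sizeY[fich] consecutive components of y (the offset y0
-# tracks the running slice), while x and z are repeated in every file.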
-# Graphical analyses
-def graphical_out (inputSample, outputSampleAll, inputDim, outputDim, montecarlosize) :
- print ("\n\n\n Writing graphical analysis files...")
- # A Pairwise scatter plot of the inputs
- myGraph = Graph()
- myPairs = Pairs(inputSample, 'Inputs relations', inputSample.getDescription(), "red", "bullet")
- myGraph.add(Drawable(myPairs))
- myGraph.draw("Input Samples",640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print ('Input pairwise scatterplot done...')
-
- # A Pairwise scatter plot of the outputs
- myGraph = Graph()
- myPairs = Pairs(outputSampleAll, 'Output relations', outputSampleAll.getDescription(), "red", "bullet")
- myGraph.add(Drawable(myPairs))
- myGraph.draw("Output Samples",640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print ('Output pairwise scatterplot done...')
-
- # A Pairwise scatter plot of the inputs/outputs
- # Draw all scatter plots yj vs xi
- for j in range(outputDim):
- outputSamplej=outputSampleAll.getMarginal(j)
- Ylabelstr=outputSamplej.getDescription()[0]
- for i in range(inputDim):
- inputSamplei=inputSample.getMarginal(i)
- Xlabelstr=inputSamplei.getDescription()[0]
- X=NumericalSample(montecarlosize,2)
- for k in range(montecarlosize):
- X[k,0]=inputSamplei[k][0]
- X[k,1]=outputSamplej[k][0]
- myGraph = Graph()
- myCloud=Cloud(X);
- mytitle=Ylabelstr+" vs "+Xlabelstr
- myGraph.add(Drawable(myCloud))
- myGraph.setAxes(1)
- myGraph.setXTitle(Xlabelstr)
- myGraph.setYTitle(Ylabelstr)
- myGraph.draw(mytitle,640,480,GraphImplementation.PDF)
- #ViewImage(myGraph.getBitmap())
- print( 'Input/Output pairwise scatterplot done...')
-
- # A histogram of the inputs
- for i in range(inputDim):
- inputSamplei=inputSample.getMarginal(i)
- myGraph = VisualTest.DrawHistogram(inputSamplei)
- labelarray=inputSamplei.getDescription()
- labelstr=labelarray[0]
- myGraph.setTitle(labelstr)
- myGraph.setName(labelstr)
- myGraph.setXTitle(labelstr)
- myGraph.setYTitle("Frequency")
- myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print ('Input histogram done...')
-
- # A histogram of the outputs
- for j in range(outputDim):
- outputSamplej=outputSampleAll.getMarginal(j)
- myGraph = VisualTest.DrawHistogram(outputSamplej)
- labelarray=outputSamplej.getDescription()
- labelstr=labelarray[0]
- myGraph.setTitle(labelstr)
- myGraph.setName(labelstr)
- myGraph.setXTitle(labelstr)
- myGraph.setYTitle("Frequency")
- myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
- #View(myGraph.getBitmap())
- print ('Output histogram done')
- print ('Graphical output terminated')
-
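-# All plots above are written to PDF through the OpenTURNS Graph.draw API; a
-# sketch of the call once the Monte Carlo samples have been collected:
-#   graphical_out(inputSample, outputSampleAll, inputDim, outputDim, montecarlosize)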
-
-def config_contingency(LinesList,GroupsList,TransformersList,LoadsList,MotorsList) :
-
- lines_con=[]
- groups_con=[]
- loads_con = []
- transfos_con = []
- motors_con = []
- sizeLines = len(LinesList)
- sizeGroups = len(GroupsList)
- sizeTransfos = len(TransformersList)
- sizeLoads = len(LoadsList)
- sizeMotors = len(MotorsList)
- val=[]
- prob=[]
-
- for i in range(sizeLines+sizeGroups+sizeTransfos + sizeLoads + sizeMotors) :
- val.append(int(i))
- for i in range (sizeLines) :
- lines_con.append(LinesList[i][0])
- prob.append(LinesList[i][1])
- for i in range (sizeGroups) :
- prob.append(GroupsList[i][1])
- groups_con.append(GroupsList[i][0])
- for i in range (sizeTransfos) :
- prob.append(TransformersList[i][1])
- transfos_con.append(TransformersList[i][0])
- for i in range (sizeLoads) :
- prob.append(LoadsList[i][1])
- loads_con.append(LoadsList[i][0])
- for i in range (sizeMotors) :
- prob.append(MotorsList[i][1])
- motors_con.append(MotorsList[i][0])
-
- return lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob
-
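-# Each *List entry is a (name, probability) pair; 'val' enumerates the possible
-# contingencies and 'prob' their weights. A sketch with hypothetical names:
-#   lines_con, groups_con, transfos_con, loads_con, motors_con, val, prob = \
-#       config_contingency([('LINE_A', 0.01)], [('GEN_1', 0.02)], [], [], [])
-#   # val == [0, 1], prob == [0.01, 0.02]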
-def LoadARMA(time_serie_file, time_serie_SS, time_serie_TH) :
- f=open(time_serie_file,"r")
- lines=f.readlines()
- N=len(lines)
- Xt=[]
- for i in range(N) :
- Xt.append([float(lines[i])])
-
- myTG=RegularGrid(0,float(time_serie_SS),N)
- TS=TimeSeries(myTG,NumericalSample(Xt))
- myWN=WhiteNoise(Distribution(Normal(0,1)),myTG)
- myState=ARMAState(TS.getSample(),NumericalSample())
- p=12
- q=0
- d=1
- myFactory = ARMALikelihoodFactory ( p , q , d )
- myARMA = myFactory.build(TS)
-
- myARMA.setState(myState)
-
- AR = myARMA.getARCoefficients()
- MA = myARMA.getMACoefficients()
-
- ts = myARMA.getRealization()
- ts.setName('A realization')
- myTSGraph=ts.drawMarginal(0)
- myTSGraph.draw('Realization'+str(p)+","+str(q),640,480,GraphImplementation.PDF)
- myARMAState=myARMA.getState()
-
- #Make a prediction of the future on next Nit instants
- Nit = int(time_serie_TH)
- myARMA2=ARMA(AR,MA,myWN,myARMAState)
- possibleFuture=myARMA2.getFuture(Nit)
- possibleFuture.setName('Possible future')
-
- Xt2=[]
- for i in range (len(possibleFuture)):
- Xt2.append(possibleFuture.getValueAtIndex(i)[0])
- Max=float(max(Xt2))
- Min=float(min(Xt2))
- h=float(Max-Min)
- for i in range (len(possibleFuture)):
- value= (Xt2[i]-Min+h/3)/(Max-Min+h/3)
- possibleFuture.setValueAtIndex(i,NumericalPoint(1,value))
-
- myFG=possibleFuture.drawMarginal(0)
- myFG.draw('Future'+str(Nit),640,480,GraphImplementation.PDF)
-
- return possibleFuture
-
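-# The rescaling above maps the ARMA forecast onto (0, 1]: with h = Max - Min,
-#   value = (x - Min + h/3) / (Max - Min + h/3)
-# sends Min to 1/4 and Max to 1, so the multiplicative time-series factor
-# stays strictly positive.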
-def LoadTS(time_serie_file) :
- TS=[]
- for i in range(len(time_serie_file)) :
- if time_serie_file[i] == -1 :
- pass
- else :
- f=open(time_serie_file[i],"r")
- lines=f.readlines()
- N=len(lines)
- Xt=[]
- for j in range(N) :
- try :
- float(lines[j])
- except ValueError :
- lines[j] = commaToPoint(lines[j])
- else :
- pass
- Xt.append([float(lines[j])])
- TS.append(Xt)
- return TS
-
-def KSDist(lines) :
- print( "Creating Kernel Smoothing distribution ")
- N=len(lines)
- Xt=[]
- for i in range(N) :
- if lines[i] == "\n" :
- print( "End of file")
- break
- else :
- try :
- float(lines[i])
- except ValueError :
- lines[i] = commaToPoint(lines[i])
- else :
- pass
- Xt.append([float(lines[i])])
- NS=NumericalSample(Xt)
- kernel=KernelSmoothing(Uniform())
- kernel.setBoundaryCorrection(True) #must be set before build() to take effect
- myBandwith = kernel.computeSilvermanBandwidth(NS)
- ##for openturns 1.6
- #KS=kernel.build(NS,myBandwith,1)
-
- #for openturns 1.8
- KS=kernel.build(NS,myBandwith)
- return KS
-
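-# KSDist usage sketch (hypothetical file with one numeric value per line;
-# commas are accepted as decimal separators via commaToPoint):
-#   with open('history.csv') as f:
-#       KS = KSDist(f.readlines())
-#   sample = KS.getSample(1000)   # draw from the kernel-smoothed distribution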
-
-def threshold (inputRandomVector, outputVariableOfInterest,pssefun,inputDistribution) :
- # We create a quadraticCumul algorithm
- myQuadraticCumul = QuadraticCumul(outputVariableOfInterest)
-
- # We compute the several elements provided by the quadratic cumul algorithm
- # and evaluate the number of calculus needed
- nbBefr = pssefun.getEvaluationCallsNumber()
-
- # Mean first order
- meanFirstOrder = myQuadraticCumul.getMeanFirstOrder()[0]
- nbAfter1 = pssefun.getEvaluationCallsNumber()
-
- # Mean second order
- meanSecondOrder = myQuadraticCumul.getMeanSecondOrder()[0]
- nbAfter2 = pssefun.getEvaluationCallsNumber()
-
- # Standard deviation
- stdDeviation = sqrt(myQuadraticCumul.getCovariance()[0,0])
- nbAfter3 = pssefun.getEvaluationCallsNumber()
-
- print( "First order mean=", myQuadraticCumul.getMeanFirstOrder()[0])
- print( "Evaluation calls number = ", nbAfter1 - nbBefr)
- print( "Second order mean=", myQuadraticCumul.getMeanSecondOrder()[0])
- print( "Evaluation calls number = ", nbAfter2 - nbAfter1)
- print ("Standard deviation=", sqrt(myQuadraticCumul.getCovariance()[0,0]))
- print( "Evaluation calls number = ", nbAfter3 - nbAfter2)
-
- print ( "Importance factors=")
- for i in range(inputRandomVector.getDimension()) :
- print(inputDistribution.getDescription()[i], " = ", myQuadraticCumul.getImportanceFactors()[i])
- print ("")
-
-def getUserDefined (values):
- val = []
- prob = []
- for a in values:
- val.append(a[0])
- prob.append(a[1])
- dim = len (val)
-
- prob = list(map(float,prob))
- prob = [p/sum(prob) for p in prob]
-
-## weights = NumericalPoint(prob)
-## Vals = []
-## for i in range(dim):
-## Vals.append([float(val[i]),float(val[i])+0.000001])
-## ranges = NumericalSample(Vals)
-## return UserDefined(ranges, weights)
- coll = UserDefinedPairCollection()
- for i in range (dim) :
- UDpair=UserDefinedPair(NumericalPoint(1,float(val[i])),float(prob[i]))
- coll.add(UDpair)
- return UserDefined(coll)
-
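-# getUserDefined builds a discrete law from (value, probability) pairs and
-# normalizes the probabilities first, e.g. with hypothetical values:
-#   law = getUserDefined([(0.0, 1), (0.5, 1), (1.0, 2)])   # weights become 0.25/0.25/0.5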
-def getHistogram (values) :
- step = []
- prob = []
- for a in values:
- step.append(a[0])
- prob.append(a[1])
- dim = len (step)
- myHistogram = HistogramPairCollection(dim)
- for i in range (dim) :
- try:
- myHistogram[i]=HistogramPair(float(step[i]),float(prob[i]))
- except:
- pass
- return myHistogram
-
-def getUserLaw(LawDico):
- time_serie = 0
- time_serie_file = ''
- time_serie_SS = 0
- time_serie_TH = 0
- if LawDico['Law']=="Normal":
- law = Normal(float(LawDico['Mu']),float(LawDico['Sigma']))#Openturns
- elif LawDico['Law']=="Uniform":
- law=Uniform(float(LawDico['A']),float(LawDico['B']))
- elif LawDico['Law']=="Exponential":
- law=Exponential(float(LawDico['Lambda']),float(LawDico['Gamma']))
- elif LawDico['Law']=="Weibull":
- if LawDico['Settings']=='AlphaBeta':
- law=Weibull(float(LawDico['Alpha']),float(LawDico['Beta']),float(LawDico['Gamma']))
- elif LawDico['Settings']=='MuSigma':
- law=Weibull(float(LawDico['Mu']),float(LawDico['Sigma']),float(LawDico['Gamma']),Weibull.MUSIGMA)
- elif LawDico['Law']=="TruncatedNormal":
- law=TruncatedNormal(float(LawDico['MuN']),float(LawDico['SigmaN']),float(LawDico['A']),float(LawDico['B']))
- elif LawDico['Law']=="UserDefined":
- law=UserDefined(getUserDefined (LawDico['Values']))
- elif LawDico['Law']=="Histogram":
- law=Histogram(LawDico['First'], getHistogram (LawDico['Values']))
- elif LawDico['Law']=="PDF_from_file":
- law=KSDist(LawDico['FileContents'])
- elif LawDico['Law']=="TimeSeries_from_file":
- law = Uniform(0.999999,1)
- time_serie=1
- time_serie_file=LawDico['FileContents']
- else :
- law = Uniform(0.999999,1)
- return law, [time_serie, time_serie_file] #[time_serie, time_serie_file, time_serie_SS, time_serie_TH]
-
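-# getUserLaw dispatch sketch; the dictionary keys follow the branches above:
-#   law, ts_info = getUserLaw({'Law': 'Normal', 'Mu': 0.5, 'Sigma': 0.1})
-#   sample = law.getSample(100)   # 100 Monte Carlo draws; ts_info == [0, '']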
-def contingency_automatic (dfxPath, acccPath, rate) :
- psspy.accc_with_dsp_3( 0.5,[0,0,0,1,1,2,0,0,0,0,0],r"""ALL""",dfxPath,acccPath,"","","")
- psspy.accc_single_run_report_4([1,int(rate),int(rate),1,1,0,1,0,0,0,0,0],[0,0,0,0,6000],[ 0.5, 5.0, 100.0,0.0,0.0,0.0, 99999.],acccPath)
-
- rslt_summary=pssarrays.accc_summary(acccPath)
- if int(rate) == 1 :
- rate = rslt_summary.rating.a
- elif int(rate) == 2 :
- rate = rslt_summary.rating.b
- elif int(rate) == 3 :
- rate = rslt_summary.rating.c
- else :
- print( "NO RATE CHOOSEN")
-
- Labels=rslt_summary.colabel #contingency labels from the ACCC summary
- contin_load=[]
- for label in Labels :
- t=[]
- rslt=pssarrays.accc_solution(acccPath,contingency,label,0.5,5.0) #'contingency' is assumed to be provided by the enclosing scope
- ampFlow=rslt.ampflow
- for i in range (len(ampFlow)) :
- t.append(ampFlow[i]/rate[i])
- contin_load.append(t)
- return contin_load
-
-def commaToPoint (string) :
- stringReplaced = string.replace(',','.')
- return stringReplaced
-
-def PFFunct(dico,x):
- # start1 = time.clock();
- stop = time.clock(); start = stop;
- Output = []
- LS = []
- FS = []
- Pmachine = []
- LStable = []
- FStable = []
- LS_beforeUC = []
- FS_beforeUC = []
- Pmachine_beforeUC = []
- LStable_beforeUC = []
- FStable_beforeUC = []
- Output_beforeUC = []
- flag_error=0
-
- num_pac = dico['num_pac']
- logCSVfilename = dico['logCSVfilename']
-
- inputSample = []
- for ite in range(len(x)):
- inputSample.append(np.array(x[ite]))
-
- TStest = dico['TStest']
- Xt = dico['Xt']
- folder = dico['folder']
- folderN_1 = dico['folderN_1']
- day = dico['day']
- doc_base = dico['doc_base']
- os.chdir(doc_base) # work in the package's own directory
- PFParams = dico['PFParams']
-
- continLines = dico['continLines']
- continGroups = dico['continGroups']
- continTransfos = dico['continTransfos']
- continLoads = dico['continLoads']
- continMotors = dico['continMotors']
- continVal = dico['continVal']
- continProb = dico['continProb']
- position = dico['position']
- timeVect = dico['timeVect']
- LawsList = dico['CorrMatrix']['laws']
- N_1_LINES = dico['N_1_LINES']
- N_1_TRANSFORMERS = dico['N_1_TRANSFORMERS']
- N_1_MOTORS = dico['N_1_MOTORS']
- N_1_LOADS = dico['N_1_LOADS']
- N_1_GENERATORS = dico['N_1_GENERATORS']
- # number of N-1 elements
- nN1 = len(N_1_LINES) + len(N_1_TRANSFORMERS) + len(N_1_MOTORS) + len(N_1_LOADS) + len(N_1_GENERATORS)
- x_copy = []
-
- for ite in range(len(x)):
- xite = []
- for j in range(len(x[ite])):
- xite.append(x[ite][j])
- x_copy.append(xite)
-
- for ite in range(len(x)):
- if TStest == 1:
- for i, law in enumerate(LawsList):
- if Xt[ite][i] == -1:
- if law != 'N_1_fromFile':
- if 'Unavailability' in dico['Laws'][law]['Type']:
- status = int(round(x[ite][i])) # ideally a value between 0 and 1 inclusive was drawn
- status = min(status, 1) # force status to be either 0 or 1
- status = max(status, 0)
- x_copy[ite][i] = status
- if dico['Laws'][law]['ComponentType'] == 'Generator' and 'Level' in dico['Laws'][law]['Type']:
- if dico['Laws'][law]['TransferFunction'] == True:
- if dico['Laws'][law]['TF_Input'] == '.pow file':
- z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
- pathWT = dico['Laws'][law]['File_Name']
- HH = dico['Laws'][law]['Hub_Height']
- alpha = dico['Laws'][law]['AlphaWS']
- PercentLoss = dico['Laws'][law]['Percent_Losses']
- x_copy[ite][i] = eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
- elif dico['Laws'][law]['TF_Input'] == 'tuples list':
- x_copy[ite][i] = applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
- else: # ensure values are between 0 and 1
- Pval = x[ite][i]
- Pval = min(Pval, 1)
- Pval = max(Pval, 0)
- x_copy[ite][i] = Pval
- else: # law == 'N_1_fromFile'
- x_copy[ite][i] = int(floor(x[ite][i]))
-
- else:
- x_copy[ite][i] = float(Xt[ite][i]) # for a time-series study, take the value from Xt
-
- else:
- for i, law in enumerate(LawsList):
- if law != 'N_1_fromFile':
- if 'Unavailability' in dico['Laws'][law]['Type']:
- status = int(round(x[ite][i])) # ideally a value between 0 and 1 inclusive was drawn
- status = min(status, 1) # force status to be either 0 or 1
- status = max(status, 0)
- x_copy[ite][i] = status
- if dico['Laws'][law]['ComponentType'] == 'Generator' and 'Level' in dico['Laws'][law]['Type']:
- if dico['Laws'][law]['TransferFunction'] == True:
- if dico['Laws'][law]['TF_Input'] == '.pow file':
- z_WS = dico['Laws'][law]['Wind_Speed_Measurement_Height']
- pathWT = dico['Laws'][law]['File_Name']
- HH = dico['Laws'][law]['Hub_Height']
- alpha = dico['Laws'][law]['AlphaWS']
- PercentLoss = dico['Laws'][law]['Percent_Losses']
- x_copy[ite][i] = eol(np.array([x[ite][i]]), z_WS, pathWT, HH, alpha, PercentLoss)[0]
- # x_copy[ite][i]=x[ite][i]
- elif dico['Laws'][law]['TF_Input'] == 'tuples list':
- x_copy[ite][i] = applyTF(x[ite][i], dico['Laws'][law]['TF_Values'])
- else: # ensure values are between 0 and 1
- Pval = x[ite][i]
- Pval = min(Pval, 1)
- Pval = max(Pval, 0)
- x_copy[ite][i] = Pval
- else: # law == 'N_1_fromFile'
- x_copy[ite][i] = int(floor(x[ite][i]))
- # build the data for data_trigger.csv
- lenlaw = len(x_copy[0]) - 1 # number of laws
- xlaw = [] # xlaw excludes the N-1 column of x_copy
-
- if nN1!=0:
- for iter in range(len(x)):
- aa = [] # temporary variable
- for ii in range(lenlaw):
- aa.append(x_copy[iter][ii])
- xlaw.append(aa)
- else:
- for iter in range(len(x)):
- aa = [] # temporary variable
- for ii in range(lenlaw+1):
- aa.append(x_copy[iter][ii])
- xlaw.append(aa)
-
-
- nameN1 = [] # names of the N-1 elements
- for N1 in N_1_LINES:
- nameN1.append(N1)
- for N1 in N_1_TRANSFORMERS:
- nameN1.append(N1)
- for N1 in N_1_MOTORS:
- nameN1.append(N1)
- for N1 in N_1_LOADS:
- nameN1.append(N1)
- for N1 in N_1_GENERATORS:
- nameN1.append(N1)
- matrixN1 = np.zeros((len(x), nN1))
-
- # build the matrix for the elements drawn by the 'N_1_fromFile' law
- for ite in range(len(x)):
- for i, law in enumerate(LawsList):
- if law == 'N_1_fromFile': # law=='N_1_fromFile"
- x_copy[ite][i] = int(floor(x[ite][i]))
- if x_copy[ite][i] < 0:
- pass
-
- if x_copy[ite][i] < len(continLines): # the drawn element is a line
- line_num = int(x_copy[ite][i])
- line_name = continLines[int(line_num)]
- for ii, name in enumerate(nameN1):
- if line_name == name:
- matrixN1[ite][ii] = 1
-
- elif x_copy[ite][i] < (len(continLines) + len(continGroups)):
- group_num = int(x_copy[ite][i]) - len(continLines)
- group_name = continGroups[int(group_num)]
- for ii, name in enumerate(nameN1):
- if group_name == name:
- matrixN1[ite][ii] = 1
-
- elif x_copy[ite][i] < (len(continLines) + len(continGroups) + len(continTransfos)):
- transfo_num = int(x_copy[ite][i]) - len(continLines) - len(continGroups)
- transfo_name = continTransfos[int(transfo_num)]
- for ii, name in enumerate(nameN1):
- if transfo_name == name:
- matrixN1[ite][ii] = 1
- elif x_copy[ite][i] < (len(continLines) + len(continGroups) + len(continTransfos) + len(continLoads)):
- load_num = int(x_copy[ite][i]) - len(continLines) - len(continGroups) - len(continTransfos)
- load_name = continLoads[int(load_num)]
- for ii, name in enumerate(nameN1):
- if load_name == name:
- matrixN1[ite][ii] = 1
-
- elif x_copy[ite][i] < (len(continLines) + len(continGroups) + len(continTransfos) + len(
- continLoads) + len(continMotors)):
- motor_num = int(x_copy[ite][i]) - len(continLines) - len(continGroups) - len(continTransfos) - len(
- continLoads)
- motor_name = continMotors[int(motor_num)]
- for ii, name in enumerate(nameN1):
- if motor_name == name:
- matrixN1[ite][ii] = 1
- else:
- pass
- xchavec = np.column_stack([np.asarray(xlaw), matrixN1])
- # write data_trigger.csv file for chavecfile characteristic
- aa = np.asarray(xchavec)
- bb = np.arange(0, len(xchavec)) + position
- cc = np.column_stack([bb, aa])
- np.savetxt('data.csv', cc, delimiter=';', fmt='%10.5f')
- filer = open('data.csv', 'r')
- filew = open('data_trigger.csv', 'a')
- for line in filer:
- if PFParams['DECIMAL_SEPARATOR'] == ",":
- text = line.replace('.', ',')
- text = text.replace(' ', '')
- else:
- text = line.replace(' ', '')
- filew.write(text)
- filer.close()
- filew.close()
- filer = os.path.join(os.getcwd(), 'data.csv')
- os.remove(filer)
-
- stop = time.clock(); print('Prepare to run comTask in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
- if sys.platform.startswith("win"): # suppress Windows error dialogs that could block the program
- import ctypes
- SEM_NOGPFAULTERRORBOX = 0x0002
- ctypes.windll.kernel32.SetErrorMode(SEM_NOGPFAULTERRORBOX);
- CREATE_NO_WINDOW = 0x08000000
- subprocess_flags = CREATE_NO_WINDOW
- else:
- subprocess_flags = 0
-
-
- lancer = [dico['Paths']['Python3_path']+'/python.exe', os.path.dirname(os.path.realpath(__file__)) +'/run_in_PFfunction.py']
- print('before run_in_PFfunction.py')
- proc1 = subprocess.Popen(lancer,shell=True,creationflags=subprocess_flags)
- # proc.wait()
- aa=0
- while 1:
- aa += 1
- print('==========time since start of package================' + str(aa*5)) # track elapsed time
-
- final = []
- for element in os.listdir(dico['doc_base']):
- if element.endswith('.final'):
- final.append(element)
-
-
- if len(final) >= dico['lenpac'] - 2:# assume up to 2 cases may fail to finish
- if len(final) == dico['lenpac']:
- comtask_ok = 0 # ComTask succeeded
- else:
- comtask_ok = 1 # ComTask incomplete: some cases are missing
- time.sleep(5)
- if proc1.poll()!=0:
- var1=subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc1.pid)],stdout=subprocess.PIPE)
- # proc.kill()
- break
- if (proc1.poll()!=None):
- comtask_ok=0
- flag_error=1
- filew = open(os.path.dirname(os.path.realpath(__file__)) + '/canotComtast' + str(position) + '.txt', 'w')
- filew.write( 'ignore'+ '\n')
- filew.close()
- var1 =subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc1.pid)],stdout=subprocess.PIPE)
- break
- time.sleep(5)
- cmd = 'WMIC PROCESS get Caption,Processid'
- proc2 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
- task = []
- for line in proc2.stdout:
- task.append(str(line))
- # print(str(line))
- # bb = 0
- for kk in task:
- if 'PowerFactory' in kk:
- var2 =subprocess.call('tskill PowerFactory',stdout=subprocess.PIPE)
-
- print('terminate run_in_PFfunction.py')
-
- if comtask_ok == 1:# re-run the simulation for the missing study cases
-
- final = []
- for element in os.listdir(dico['doc_base']):
- if element.endswith('.final'):
- final.append(element)
- if len(final) != dico['lenpac']:# check once more whether all cases were simulated
- filew = open(os.path.dirname(os.path.realpath(__file__))+'/absence'+str(position)+'.txt', 'w')
- for ite in range(len(x)):
- name = 'Case_' + str(ite + dico['position']) + '.final'
- if name not in final:
- filew.write(str(ite + dico['position']) + '\n')
- filew.close()
- print('Run correct_comtask.py now')
- lancer = [dico['Paths']['Python3_path']+'\python.exe', os.path.dirname(os.path.realpath(__file__)) +'/correct_comtask.py']
- proc = subprocess.Popen(lancer,creationflags=subprocess_flags)
- proc.wait()
- print('after correct_comtask.py')
- var3 = subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc.pid)], stdout=subprocess.PIPE)
-
- cmd = 'WMIC PROCESS get Caption,Processid'
- proc4 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
- task = []
- for line in proc4.stdout:
- task.append(str(line))
- for kk in task:
- if 'PowerFactory' in kk:
- print('!!!!!!!!!!!!!!!! PowerFactory remains After CorrectComtask !!!!!!!!!!!!!!!!!!!')
- var2 =subprocess.call('tskill PowerFactory',stdout=subprocess.PIPE) # force-close PowerFactory
- stop = time.clock(); print('Run ComTask in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
- var1 = subprocess.call(['taskkill', '/F', '/T', '/PID', str(proc1.pid)], stdout=subprocess.PIPE)
-
- ##########################################################################################END parallel computation
- ##########################################################################################BEGIN results processing
-
- if flag_error==0:
- if dico['UnitCommitment']:
- beforeUC = []
- for element in os.listdir(dico['doc_base']):
- if element.endswith('.before'):
- beforeUC.append(element)
- mm = [0] # collect the case numbers so the file names can be sorted numerically
- for aa in range(1, len(beforeUC)): # extract the number embedded in each name
- nn = ''.join(ele for ele in beforeUC[aa] if ele.isdigit())
- mm.append(int(nn))
- nn = sorted(mm)
- aa = []
- for kk in nn:
- aa.append(beforeUC[mm.index(kk)])
- beforeUC = aa # names now sorted by case number
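-
- # A more robust one-liner for the same numeric sort (a sketch; assumes each
- # name embeds exactly one case number, e.g. 'Case_12.before'):
- # beforeUC.sort(key=lambda s: int(''.join(ch for ch in s if ch.isdigit())))
- # The same pattern applies to the .final and .shd sorts below.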
- for case in beforeUC[-len(x):]:
- with open(case, 'rb') as fichier:
- mon_depickler = pickle.Unpickler(fichier)
- y, z, Ymac, indicLS, indicFS, loadShed, fxshnt = mon_depickler.load()
- nn = ''.join(ele for ele in case if ele.isdigit()) # extract the case number
- x2 = xlaw[int(nn)-dico['position']].copy()
- for ii in range(len(matrixN1[int(nn)-dico['position']])):
- if matrixN1[int(nn)-dico['position']][ii] == 1:
- x2.append(nameN1[ii])
- Output_beforeUC.append(z) # append the output
- Pmachine_beforeUC.append(Ymac)
- LS_beforeUC.append(indicLS)
- FS_beforeUC.append(indicFS)
- LStable_beforeUC.extend(loadShed)
- FStable_beforeUC.extend(fxshnt)
- if TStest == 1:
- MyLogger(x2, y, z, dico['logCSVfilename_UC'][num_pac], timeVect[int(nn)])
- else:
- MyLogger(x2, y, z, dico['logCSVfilename_UC'][num_pac], int(nn)) # for each iteration write in the CSV
- for file in beforeUC:# delete the pickle files
- os.remove(file)
-
- final = []
- for element in os.listdir(dico['doc_base']):
- if element.endswith('.final'):
- final.append(element)
- mm = [0] # collect the case numbers so the file names can be sorted numerically
- for aa in range(1, len(final)): # extract the number embedded in each name
- nn = ''.join(ele for ele in final[aa] if ele.isdigit())
- mm.append(int(nn))
- nn = sorted(mm)
- aa = []
- for kk in nn:
- aa.append(final[mm.index(kk)])
- final = aa # names now sorted by case number
- for case in final[-len(x):]:
- with open(case, 'rb') as fichier:
- mon_depickler = pickle.Unpickler(fichier)
- y, z, Ymac, indicLS, indicFS, loadShed, fxshnt = mon_depickler.load()
- nn = ''.join(ele for ele in case if ele.isdigit()) # extract the case number
- x2 = xlaw[int(nn)-dico['position']].copy()
- for ii in range(len(matrixN1[int(nn)-dico['position']])):
- if matrixN1[int(nn)-dico['position']][ii] == 1:
- x2.append(nameN1[ii])
- if TStest == 1:
- MyLogger(x2, y, z, logCSVfilename[num_pac], timeVect[int(nn)])
- else:
- MyLogger(x2, y, z, logCSVfilename[num_pac], int(nn)) # for each iteration write in the CSV
- Output.append(z) # append the output
- Pmachine.append(Ymac)
- LS.append(indicLS)
- FS.append(indicFS)
- LStable.extend(loadShed)
- FStable.extend(fxshnt)
- for file in final:# delete the pickle files
- os.remove(file)
- print(nameN1)
- ##########################################################################################END results processing
-
- return inputSample, Output, Pmachine, LS, FS, LStable, FStable, Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
-
-def create_dist(dico):
-
- NumLaws = len(dico['Laws']) + int(dico['N_1_fromFile'])
-
- #Create a correlation matrix as copulas
- CorrMatrixNames = dico['CorrMatrix']['laws']
- CorrMatrix = dico['CorrMatrix']['matrix']
- corr=CorrelationMatrix(NumLaws)#Openturns
-
- # Create a collection of the marginal distributions
- collectionMarginals = DistributionCollection(NumLaws)#Openturns
-
- distributionX = []
- for i,key in enumerate(CorrMatrixNames):
- data, [time_serie, time_serie_file] = getUserLaw(dico['Laws'][key])
- distributionX.append( data )
- collectionMarginals[i] = Distribution(data)
-
- #add N_1 components entered as Files
- if dico['N_1_fromFile']:
- continTuples = []
- for j in range(len(dico['continVal'])):
- continTuples.append((dico['continVal'][j],dico['continProb'][j]))
- data = getUserDefined(continTuples)
- distributionX.append(data)
- collectionMarginals[i+1] = Distribution(data)
- dico['CorrMatrix']['laws'] = list(CorrMatrixNames) + ['N_1_fromFile']
- CorrMatrixEx = np.hstack((CorrMatrix, np.zeros((NumLaws-1,1)))) #assume no correlation between N-1 and other laws
- LastLine = np.hstack((np.zeros((1,NumLaws-1)),np.ones((1,1))))
- CorrMatrixEx = np.vstack((CorrMatrixEx, LastLine))
- CorrMatrix = CorrMatrixEx
- (Nrows, Ncols) = np.shape(CorrMatrixEx)
- else:
- (Nrows, Ncols) = np.shape(CorrMatrix)
- for i in range(Nrows):
- for j in range(Ncols):
- corr[i,j]=CorrMatrix[i,j]
-
- corr2= NormalCopula.GetCorrelationFromSpearmanCorrelation(corr) # convert the Spearman ranks to the copula's Pearson correlation
- copula=Copula(NormalCopula(corr2))
-
- # Create the input probability distribution; the arguments are the marginal distributions and the copula
- inputDistribution = ComposedDistribution(collectionMarginals, copula)
-
- return inputDistribution
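-
-# Minimal usage sketch (mirrors the OpenTURNS calls in Calculation() below):
-# inputDistribution = create_dist(dico)
-# myMCE = MonteCarloExperiment(inputDistribution, dico['lenpac'])
-# inputSamp = myMCE.generate() # one row of sampled law values per case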
-
-def Calculation(dico,nb_fix,cmd_Path):
- msg = 'run'
- output1=[]
- inputSamp1=[]
- Pmachine1=[]
- Ind1,Ind2=[],[]
- LStable = []
- FStable = []
- LStable_beforeUC = []
- FStable_beforeUC = []
- output_beforeUC = []
- Pmachine_beforeUC = []
- t = 0 # package counter
-
-
- p = subprocess.Popen([dico['Paths']['Python3_path']+'\\python.exe', cmd_Path], stdout=subprocess.PIPE) # launch subprocess
- nbsr = NonBlockingStreamReader(p.stdout) # monitor subprocess stdout
-
- chemin=os.getcwd()
- dico['cheminPSEN'] = chemin
- os.chdir(dico['doc_base']) # to work in correct directory
-
- flag2 = dico['flag2']
- inputDistribution = create_dist(dico) # create new distribution
- RandomGenerator.SetSeed(os.getpid())
- outputSampleAll = NumericalSample(0,12) # empty OpenTURNS sample with 12 output columns
-
- while msg == 'run':
-
- stop = time.clock();start=stop;
-
- t += 1
- print('Package ' + str(t))
- # LStable/FStable and the *_beforeUC tables accumulate across packages and are deliberately not reset here
- output=[]
- inputSample=[]
- Pmachine=[]
-
- myMCE = MonteCarloExperiment(inputDistribution,dico['lenpac']) #create new sample
- inputSamp = myMCE.generate()
- dicow = dico.copy()
- dicow['inputSamp']=inputSamp
- del dicow['all_inputs_init']
- del dicow['CorrMatrix']
- dicow['CorrMatrix'] = {}
- dicow['CorrMatrix']['laws'] = list(dico['CorrMatrix']['laws'])
- dicow['CorrMatrix']['matrix'] = dico['CorrMatrix']['matrix']
-
- with open(chemin + '/PSEN/data_dico', 'wb') as fichier: # save the data to hand over to the computation subprocess
- mon_pickler = pickle.Pickler(fichier, protocol=2)
- mon_pickler.dump(dicow)
- print(' Enter in PFfunction.py')
- res=PFFunct(dico,inputSamp) # launch PFFunct (OPF)
- print('Out PFfunction.py')
- # 0 1 2 3 4 5 6
- #inputSample, Output, Pmachine, LS, FS, LStable, FStable,
- # 7 8 9 10 11 12
- #Output_beforeUC, Pmachine_beforeUC, LS_beforeUC, FS_beforeUC, LStable_beforeUC, FStable_beforeUC
- for result in res[1]:
- outputSampleAll.add(NumericalPoint(result)) #create a Numerical Sample variable
- if (flag2):
- LS=(np.mean(res[3])) #mean per package
- FS=(np.mean(res[4])) #mean per package
- z=[LS,FS]
- #if criteria on nbeTension and NbeTransit
- else:
- NbeTransit=(float(NumericalPoint(1,outputSampleAll.computeMean()[0])[0])) #mean per package
- NbeTension=(float(NumericalPoint(1,outputSampleAll.computeMean()[1])[0]))
- z=[NbeTransit,NbeTension]
-
-
- inputSample.extend(res[0])
- LStable.extend(res[5])
- FStable.extend(res[6])
- output.extend(res[1])
- Pmachine.extend(res[2])
-
- LStable_beforeUC.extend(res[11])
- FStable_beforeUC.extend(res[12])
- output_beforeUC.extend(res[7])
- Pmachine_beforeUC.extend(res[8])
-
- output1.extend(output)
- inputSamp1.extend(inputSample)
- Pmachine1.extend(Pmachine)
- if msg=='run':
- msg, indice1, indice2=Convergence(dico,int(dico['PFParams']['LS_Q_CONVERGENCE_CRITERIA']), nb_fix, cmd_Path,z,t)# check convergence
- Ind1.append(indice1)
- Ind2.append(indice2)
- if len(Ind1) == nb_fix:
- msg = 'stop'
- if msg == 'stop':
- p.terminate()
- appui = nbsr.readline(0.1)
- if appui:
- print('Interrupting simulation...')
- msg = 'stop'
- dico['position'] += dico['lenpac']
- stop = time.clock(); start = stop;
-
- print('All packages finished, preparing to export the AllCase.pfd file')
- cmd = 'WMIC PROCESS get Caption,Processid'
- proc2 = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
- task = []
- for line in proc2.stdout:
- task.append(str(line))
- for kk in task:
- if 'PowerFactory' in kk:
- print('!!!!!!!!!!!!!!!! PowerFactory remains in Calculation !!!!!!!!!!!!!!!!!!!')
- var2 =subprocess.call('tskill PowerFactory',stdout=subprocess.PIPE) # force-close PowerFactory
- time.sleep(5)
-
- if dico['UnitCommitment']:
- f=open(dico['logCSVfilename_UC'][dico['num_pac']],'a')
- f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
- f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
- for i in range(max(len(LStable_beforeUC),len(FStable_beforeUC))):
- try:
- f.write('{0};;{1};{2};{3};{4}'.format(LStable_beforeUC[i][0],LStable_beforeUC[i][1],LStable_beforeUC[i][2],LStable_beforeUC[i][3],LStable_beforeUC[i][4]))
- except IndexError: # fewer load-shed rows than shunt rows
- f.write(';;;;;')
- try:
- f.write(';;;{0};;{1};{2} \n'.format(FStable_beforeUC[i][0],FStable_beforeUC[i][1],FStable_beforeUC[i][2]))
- except IndexError: # fewer shunt rows than load-shed rows
- f.write('\n')
- f.write("\n\n")
- f.close()
-
- # write summary tables
- f=open(dico['logCSVfilename'][dico['num_pac']],'a')
- f.write("\n Summary Table for MW Load Adjustments;;;;;;;;Summary Table for Added Shunt (Mvar)\n")
- f.write("Iteration;;Bus Number;Name;Load Shed;Remaining Load;;;Iteration;;Bus Number;Final \n")
- for i in range(max(len(LStable), len(FStable))):
- try:
- f.write('{0};;{1};{2};{3};{4}'.format(LStable[i][0],LStable[i][1],LStable[i][2],LStable[i][3],LStable[i][4]))
- except IndexError: # fewer load-shed rows than shunt rows
- f.write(';;;;;')
- try:
- f.write(';;;{0};;{1};{2} \n'.format(FStable[i][0],FStable[i][1],FStable[i][2]))
- except IndexError: # fewer shunt rows than load-shed rows
- f.write('\n')
- f.write("\n\n")
- f.close()
-
- try:
- import powerfactory
- app = powerfactory.GetApplication()
- user = app.GetCurrentUser()
- prjs = user.GetContents('*.IntPrj')
- prjs.sort(key=lambda x: x.gnrl_modif, reverse=True)
- prj = prjs[0]
- ComExp = user.CreateObject('ComPfdExport') # object that exports the final .pfd file containing all the simulated cases
- app.SetWriteCacheEnabled(1) # Disable consistency check
- ComExp.g_objects = [prj] # define the project to be exported
- ComExp.g_file = os.path.join(dico['doc_base'], "AllCase.pfd")
- err = ComExp.Execute() # Command starts the export process
- app.SetWriteCacheEnabled(0) # Enable consistency check
- print(prj)
- print(prj.loc_name)
- ComExp.Delete()
- prj.Delete()
- stop = time.clock(); print(' Export all study case in ' + str(round(stop - start, 3)) + ' seconds'); start = stop;
- except Exception:
- pass # skip the export if the powerfactory module or the project is unavailable
- import shutil
- shutil.copy2(chemin + '/PSEN/data_dico', 'data_dico') # save the data
-
- shdfileUC = []
- for element in os.listdir(os.path.dirname(os.path.realpath(__file__))):
- if element.endswith('.shdUC'):
- shdfileUC.append(element)
- mm = [] # collect the case numbers so the file names can be sorted numerically
- for aa in range(len(shdfileUC)): # extract the number embedded in each name
- nn = ''.join(ele for ele in shdfileUC[aa] if ele.isdigit())
- mm.append(int(nn))
- nn = sorted(mm)
- aa = []
- for kk in nn:
- aa.append(shdfileUC[mm.index(kk)])
- shdfileUC = aa # names now sorted by case number
-
- if len(shdfileUC)>0:
- filew = open(os.path.dirname(dico['doc_base']) + '/No_Cost_OPF_convergence_beforeUC' + '.csv', 'w')
- for aa in range(len(shdfileUC)): # one line per flagged case
- strings = shdfileUC[aa].split('_')
- filew.write('Case_' + strings[1] + ';' + strings[2].split('.')[0] + '\n')
- filew.close()
- for file in shdfileUC:
- os.remove(os.path.dirname(os.path.realpath(__file__)) + '\\' + file)
-
- shdfile = []
- for element in os.listdir(os.path.dirname(os.path.realpath(__file__))):
- if element.endswith('.shd'):
- shdfile.append(element)
- mm = [] # collect the case numbers so the file names can be sorted numerically
- for aa in range(len(shdfile)): # extract the number embedded in each name
- nn = ''.join(ele for ele in shdfile[aa] if ele.isdigit())
- mm.append(int(nn))
- nn = sorted(mm)
- aa = []
- for kk in nn:
- aa.append(shdfile[mm.index(kk)])
- shdfile = aa # names now sorted by case number
-
- if len(shdfile)>0:
- filew = open(os.path.dirname(dico['doc_base']) + '/No_Cost_OPF_convergence' + '.csv', 'w')
- for aa in range(len(shdfile)): # one line per flagged case
- strings = shdfile[aa].split('_')
- filew.write('Case_' + strings[1] + ';' + strings[2].split('.')[0] + '\n')
- filew.close()
-
- for file in shdfile: # delete the .shd files
- os.remove(os.path.dirname(os.path.realpath(__file__)) + '\\' + file)
-
- return Ind1,Ind2,output1,inputSamp1,Pmachine1
-
-class NonBlockingStreamReader(): #class object to read in a stdout process
-
- def __init__(self, stream):
- '''
- stream: the stream to read from.
- Usually a process' stdout or stderr.
- '''
- self._s = stream
- self._q = Queue()
-
- def _populateQueue(stream, queue):
- '''
- Collect lines from 'stream' and put them in 'queue'.
- '''
- while True:
- line = stream.readline() # read line by line; stream.read() would block until EOF
- if line:
- queue.put(line)
- else:
- break # an empty read means EOF: stop the reader thread
- self._t = Thread(target = _populateQueue,
- args = (self._s, self._q))
- self._t.daemon = True
- self._t.start() #start collecting lines from the stream
-
- def readline(self, timeout = None):
- try:
- return self._q.get(block = timeout is not None,
- timeout = timeout)
- except Empty:
- return None
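-
-# Minimal usage sketch (mirrors how Calculation() uses this class; Thread,
-# Queue and Empty are assumed to be imported at the top of this module):
-# p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
-# nbsr = NonBlockingStreamReader(p.stdout)
-# line = nbsr.readline(0.1) # None if no output arrived within 0.1 s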
-
-def Convergence(dico,OPF, nb_fix, cmd_Path,z,t):
- LS=[]
- FS=[]
- MoyTension=[]
- MoyTransit=[]
- MoyCumuLS=[]
- MoyCumuFS=[]
- NbeTension=[]
- NbeTransit=[]
- Ind1,Ind2=[],[] # local history; without this the appends below raise a NameError
- msg='run'
- print ('Calculating convergence criteria\n')
- debut=z
- if (OPF): # if criteria on Load shed and mvar
- LS.append(debut[0])
- FS.append(debut[1])
- MoyCumuLS.append(np.mean(LS[0:t]))
- MoyCumuFS.append(np.mean(FS[0:t]))
-
- if t == 1:
- indice1 = 1
- indice2 = 1
- else:
- indice1 = np.std(MoyCumuLS) # calculate stop criterion for load shedding
- indice2 = np.std(MoyCumuFS) # calculate stop criterion for mvar
-
- Ind1.append(indice1)
- Ind2.append(indice2)
- print('indicator Load Shedding= ' + str(indice1) + ';' + ' indicator Added Mvar= ' + str(indice2) + '\n')
-
- if (indice1 < 0.2) and (indice2 < 0.015) and nb_fix == 0:
- msg = 'stop'
- elif len(Ind1) == nb_fix: # with the local history this only fires when nb_fix == 1; Calculation() enforces the general package count
- msg = 'stop'
- else:
- NbeTransit.append(debut[0])
- NbeTension.append(debut[1])
- MoyTension.append(np.mean(NbeTension[0:len(NbeTension)]))
- MoyTransit.append(np.mean(NbeTransit[0:len(NbeTransit)]))
-
- if t == 1:
- indice1 = 1
- indice2 = 1
- else:
- indice1 = np.std(MoyTension) # calculate stop criterion for tension
- indice2 = np.std(MoyTransit) # calculate stop criterion for transit
-
- print('indicator Nbe Tension= ' + str(indice1) + ' indicator Transit= ' + str(indice2) + '\n')
-
- if (indice1 < 0.01) and (indice2 < 0.01) and nb_fix == 0:
- msg = 'stop'
-
- return msg,indice1,indice2
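-
-# Stopping rule implemented above: after each package t, the cumulative mean of
-# the per-package indicator (load shed / added Mvar when OPF criteria are used,
-# otherwise voltage / transit violation counts) is appended, and the run stops
-# once the standard deviation of those cumulative means falls below the
-# thresholds (0.2 and 0.015, or 0.01 and 0.01), unless a fixed package count
-# nb_fix is imposed. Caveat: the history lists are re-created on every call, so
-# each call only sees the current package; the cross-package history is kept by
-# the caller, Calculation().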
+++ /dev/null
-# -*- coding: cp1252 -*-
-import sys
-from Tkinter import *
-import os
-
-
-def maFonction6(event):
- quitting()
-
-def quitting():
- can1.delete(proceeding)
- can1.create_text(200,50,font=('Fixedsys',12),text="If you want to quit press button again...")
- Button(root,text="Stop Simulation",font=("Fixedsys"),command=really_quitting).grid(row=4,column=1,sticky=N,padx=5)
-
-def really_quitting():
- print 'quitting'
- root.destroy()
-
-# create an instance of the Tk class and assign it to the "root" object
-root = Tk()
-root.title("PSEN - Processing...")
-can1=Canvas(root,width=400,height=100,bg="light blue")
-can1.grid(row=0,column=0,rowspan=10)
-
-proceeding=can1.create_text(200,50,font=('Fixedsys',12),text="Processing...")
-
-Button(root,text="Stop Simulation",font=("Fixedsys"),command=quitting).grid(row=4,column=1,sticky=N,padx=5)
-root.bind("<q>", maFonction6) # lettre q
-root.mainloop()
+++ /dev/null
-# -*- coding: cp1252 -*-
-import sys
-from tkinter import *
-import os
-
-
-def maFonction6(event):
- quitting()
-
-def quitting():
- can1.delete(proceeding)
- can1.create_text(200,50,font=('Fixedsys',12),text="If you want to quit press button again...")
- Button(root,text="Stop Simulation",font=("Fixedsys"),command=really_quitting).grid(row=4,column=1,sticky=N,padx=5)
-
-def really_quitting():
- print ('quitting')
- root.destroy()
-
-# create an instance of the Tk class and assign it to the "root" object
-root = Tk()
-root.title("PSEN - Processing...")
-can1=Canvas(root,width=400,height=100,bg="light blue")
-can1.grid(row=0,column=0,rowspan=10)
-
-proceeding=can1.create_text(200,50,font=('Fixedsys',12),text="Processing...")
-
-Button(root,text="Stop Simulation",font=("Fixedsys"),command=quitting).grid(row=4,column=1,sticky=N,padx=5)
-root.bind("<q>", maFonction6) # lettre q
-root.mainloop()