1 # -*- coding: utf-8 -*-
\r
3 Created on Mon Jun 03 15:31:42 2013
\r
# --- PSSE environment setup --------------------------------------------------
# NOTE(review): original lines 12-15 are missing from this extraction;
# presumably they contained "import psspy" (and friends), since psspy is used
# below before any visible import.
9 import os,sys,random,string
# Make the PSSE 33 binaries importable and resolvable at run time.
10 sys.path.append(r"C:\Program Files\PTI\PSSE33\PSSBIN")
11 os.environ['PATH'] = r"C:\Program Files\PTI\PSSE33\PSSBIN;"+ os.environ['PATH']
# PSSE "default value" sentinels for integer, real and character API arguments.
16 _i=psspy.getdefaultint()
17 _f=psspy.getdefaultreal()
18 _s=psspy.getdefaultchar()
# Initialise PSSE with a bus capacity of 80000.
21 psspy.psseinit(80000)
# Wildcard import of the OpenTURNS probabilistic toolbox.
25 from openturns import *
\r
27 #===============================================================================
\r
28 # DEFINITION DES FONCTIONS - CREATION OF THE FUNCTIONS
\r
29 #===============================================================================
\r
31 # Wind-speed to power transfer function of a wind turbine
# Returns (presumably) the normalised power output of one turbine for a given
# wind value -- TODO confirm: the return statement is not visible here.
32 def eol(wind, WTconfig):
# WTconfig is indexed positionally; from the names: [1]=rated wind speed,
# [2]=cut-out wind speed, [4]=loss rate -- confirm against the caller.
34 Vrate = WTconfig [1]
35 Vcout = WTconfig [2]
37 lossrate = WTconfig [4]
# NOTE(review): original lines 33, 36, 38-40 and 42 are missing from this
# extraction, so the per-wind-region branching is lost; as visible, the
# second assignment unconditionally overwrites the first.
41 Pnorm=wind*(1-lossrate)#((wind**2-Vcin**2)/(Vrate**2-Vcin**2)*Rho/1.225*(1-lossrate))
43 Pnorm = 1*(1-lossrate)
\r
48 # Function that reads the data of interest and puts it into matrices
# Gathers bus, branch, transformer, machine, load and fixed-shunt data from
# the PSSE subsystem API and returns each group as a row-per-element matrix.
# NOTE(review): the "def" line of this function (original lines 49-50) and the
# initialisations of sid, buses, lines, transf, plants, loads, shunt and
# cstrings are missing from this extraction.
51 # Select what to report
# If bus subsystem 0 is defined use it, otherwise select every bus; the two
# assignment lines (presumably "sid = 0" / "sid = -1") are missing.
52 if psspy.bsysisdef(0):
54 else: # Select subsytem with all buses
# Subsystem-API flags controlling what each a*int/a*real/a*char call returns.
57 flag_bus = 1 # in-service
58 flag_plant = 4 # in-service
59 flag_load = 1 # in-service
60 flag_swsh = 1 # in-service
61 flag_brflow = 1 # in-service
62 owner_brflow = 1 # bus, ignored if sid is -ve
63 ties_brflow = 5 # ignored if sid is -ve
64 entry = 1 # gives a single entry (each branch once)
66 #Bus data (number, basekV, pu, name, ...) : PSSe has 3 functions one for integer data, one for real data and one for strings
67 istrings = ['number']
68 ierr, idata = psspy.abusint(sid, flag_bus, istrings)
# NOTE(review): the append of idata to buses (original 69-70) is missing.
71 rstrings = ['base','pu']
72 ierr, rdata = psspy.abusreal(sid, flag_bus, rstrings)
73 buses.append(rdata[0])
74 buses.append(rdata[1])
# cstrings for the bus names is set in the missing lines 75-76.
77 ierr, cdata = psspy.abuschar(sid, flag_bus, cstrings)
78 buses.append(cdata[0])
80 buses=zip(*buses) # transpose the matrix
82 del idata, rdata, istrings, rstrings
84 #Lines data (from, to, amps, rate%a, ploss, qloss)
85 flag=2 #All non-transformer branches
86 istrings = ['fromnumber','tonumber']
87 ierr, idata = psspy.abrnint(sid, owner_brflow, ties_brflow, flag, entry, istrings)
# NOTE(review): the loop appending idata rows to lines (88-89) is missing.
90 rstrings=['amps','pctratea','pctrateb','pctratec','p','q']
91 ierr, rdata = psspy.abrnreal(sid, owner_brflow, ties_brflow, flag, entry, rstrings)
92 for rc in range (np.matrix(rdata).shape[0]) :
93 lines.append(rdata[rc])
95 cstrings=['fromname','toname','id']
96 ierr, cdata = psspy.abrnchar(sid, owner_brflow, ties_brflow, flag, entry, cstrings)
97 for rc in range (np.matrix(cdata).shape[0]) :
98 lines.append(cdata[rc])
100 lines=zip(*lines) # transpose the matrix
102 del idata, rdata, istrings, rstrings
104 #2 windings transformers data (from, to, amps, rate%a, ploss, qloss)
105 flag=6 #All transformer branches
106 istrings = ['fromnumber','tonumber']
107 ierr, idata = psspy.abrnint(sid, owner_brflow, ties_brflow, flag, entry, istrings)
110 rstrings=['amps','pctratea','ploss','qloss']
111 ierr, rdata = psspy.abrnreal(sid, owner_brflow, ties_brflow, flag, entry, rstrings)
112 for rc in range (np.matrix(rdata).shape[0]) :
113 transf.append(rdata[rc])
115 transf=zip(*transf) # transpose the matrix
117 del idata, rdata, istrings, rstrings
119 #Machines data (bus, inservice, number, pgen, qgen, mvabase)
120 istrings = ['number','status']
121 ierr, idata = psspy.amachint(sid, flag_plant, istrings)
# cstrings for the machine ids is set in the missing lines 122-124.
125 ierr, cdata = psspy.amachchar(sid, flag_plant, cstrings)
126 for rc in range (np.matrix(cdata).shape[0]) :
127 plants.append(cdata[rc])
129 rstrings = ['pgen','qgen','mbase','pmax','qmax']
130 ierr, rdata = psspy.amachreal(sid, flag_plant, rstrings)
131 for rc in range (np.matrix(rdata).shape[0]) :
132 plants.append(rdata[rc])
134 cstrings = ['name']
135 ierr, cdata = psspy.amachchar(sid, flag_plant, cstrings)
136 plants.append(cdata[0])
138 nb_plants=np.matrix(plants).shape[1]
# Zero out P and Q of out-of-service machines (status row is 0/1).
139 for rc in range (0,nb_plants) :
140 plants[3][rc]=float(plants[3][rc]*int(plants[1][rc])) # If the plant isn't in service its production is fixed to zero
141 plants[4][rc]=float(plants[4][rc]*int(plants[1][rc])) # If the plant isn't in service its production is fixed to zero
143 plants=zip(*plants) # transpose the matrix
145 #Loads data (bus, active, reactive)
146 istrings = ['number']
147 ierr, idata = psspy.aloadint(sid, flag_load, istrings)
150 xstrings = ['mvaact']
151 ierr, xdata = psspy.aloadcplx(sid, flag_load, xstrings)
152 loads.append(np.real(xdata)[0]) # Append the real part of the load
153 loads.append(np.imag(xdata)[0]) #Append the imaginary part of the load
155 istrings = ['status']
156 ierr, idata = psspy.aloadint(sid, flag_load, istrings)
157 loads.append(idata[0])
159 cstrings = ['name']
160 ierr, cdata = psspy.aloadchar(sid, flag_load, cstrings)
161 loads.append(cdata[0])
163 nb_loads=np.matrix(loads).shape[1]
# Zero out P and Q of out-of-service loads (status row is 0/1).
164 for rc in range (0,nb_loads) :
165 loads[1][rc]=float(loads[1][rc]*int(loads[3][rc])) # If the load isn't in service its consumption is fixed to zero
166 loads[2][rc]=float(loads[2][rc]*int(loads[3][rc])) # If the load isn't in service its consumption is fixed to zero
168 loads=zip(*loads) # transpose the matrix
170 #Fixed shunt data (number, MVAR, name, ...)
171 istrings = ['number','status']
172 ierr, idata = psspy.afxshntbusint(sid, flag_bus, istrings)
175 xstrings = ['shuntact']
176 ierr, xdata = psspy.afxshntbuscplx(sid, flag_bus, xstrings)
177 shunt.append(np.imag(xdata)[0]) #Append the imaginary part of the load
179 cstrings = ['name']
180 ierr, cdata = psspy.afxshntbuschar(sid, flag_bus, cstrings)
181 shunt.append(cdata[0])
183 shunt=zip(*shunt) # transpose the matrix
185 return buses, lines, transf, plants, loads, shunt
\r
187 # Function writing a csv-style output file
# Append one iteration record (ite, then x, z and y) to both a CSV log
# (semicolon-separated) and a TXT log (tab-separated).
# NOTE(review): nx, nz and ny are used below but defined in lines missing from
# this extraction (original 191-192, 195-197, 200-201, 204-206, 209, 212-213,
# 216, 219-222) -- presumably the lengths of x, z and y.  The CSV handle is
# rebound to the TXT file without a visible close; the close calls may also be
# in the missing lines.
188 def MyLogger(x,y,z,logCSVfilename,logTXTfilename,ite):
189 f=open(logCSVfilename, 'a')
190 f.write("%f;" % (ite))
193 for i in range(0,nx):
194 f.write(str(x[i]))#f.write("%f;" % (x[i]))
198 for i in range(0,nz):
199 f.write("%f;" % (z[i]))
202 for j in range(0,ny):
203 f.write("%f;" % (y[j]))
207 f=open(logTXTfilename, 'a')
208 f.write("%f\t" % (ite))
210 for i in range(0,nx):
211 f.write(str(x[i]))#f.write("%f\t" % (x[i]))
214 for i in range(0,nz):
215 f.write("%f\t" % (z[i]))
217 for j in range(0,ny):
218 f.write("%f\t" % (y[j]))
\r
223 # Function writing one csv-style output file per type of output quantity
# For each output group in sizeY, append one record (ite, x, z, slice of y)
# to a per-group CSV file.
# NOTE(review): nx, nz, ny and y0 are used below but defined in lines missing
# from this extraction.  The path string uses raw backslashes ("\N", "\Y"):
# that only survives because those are not recognised escapes in Python 2 --
# fragile; should be r"" or os.path.join -- confirm before touching.
224 def MyMultiLogger (x, y, sizeY, z, ite, folder, day, fich, hour):
# The fich parameter is immediately shadowed by the loop variable.
227 for fich in range (np.size(sizeY,0)):
228 multilogfilename=folder+"\N"+day+"\Y"+str(fich)+"simulationDClog"+hour+".csv"
229 f=open(multilogfilename, 'a')
230 f.write("%f;" % (ite))
233 for i in range(0,nx):
234 f.write("%f;" % (x[i]))
237 for i in range(0,nz):
238 f.write("%f;" % (z[i]))
241 for j in range(0,ny):
242 f.write("%f;" % (y[j+y0]))
246 print "Fichiers "+str(ite)+" enregistres\n\n"
\r
248 # Graphical analyses
# Produce PDF scatter plots and histograms of the Monte-Carlo input and output
# samples using OpenTURNS graphics.
# NOTE(review): myGraph and myCloud are used below before any visible
# assignment -- their construction lines (original 252, 260, 266, 279-280,
# 283) are missing from this extraction.
249 def graphical_out (inputSample, outputSampleAll, inputDim, outputDim, montecarlosize) :
250 print "\n\n\n Writing graphical analysis files..."
251 # A Pairwise scatter plot of the inputs
253 myPairs = Pairs(inputSample, 'Inputs relations', inputSample.getDescription(), "red", "bullet")
254 myGraph.add(Drawable(myPairs))
255 myGraph.draw("Input Samples",640,480,GraphImplementation.PDF)
256 #View(myGraph.getBitmap())
257 print 'Input pairwise scatterplot done...'
259 # A Pairwise scatter plot of the outputs
261 myPairs = Pairs(outputSampleAll, 'Output relations', outputSampleAll.getDescription(), "red", "bullet")
262 myGraph.add(Drawable(myPairs))
263 myGraph.draw("Output Samples",640,480,GraphImplementation.PDF)
264 #View(myGraph.getBitmap())
265 print 'Output pairwise scatterplot done...'
267 # A Pairwise scatter plot of the inputs/outputs
268 # Draw all scatter plots yj vs xi
269 for j in range(outputDim):
270 outputSamplej=outputSampleAll.getMarginal(j)
271 Ylabelstr=outputSamplej.getDescription()[0]
272 for i in range(inputDim):
273 inputSamplei=inputSample.getMarginal(i)
274 Xlabelstr=inputSamplei.getDescription()[0]
# Build a 2-column sample (xi, yj) to feed the cloud plot.
275 X=NumericalSample(montecarlosize,2)
276 for k in range(montecarlosize):
277 X[k,0]=inputSamplei[k][0]
278 X[k,1]=outputSamplej[k][0]
281 mytitle=Ylabelstr+"vs"+Xlabelstr
282 myGraph.add(Drawable(myCloud))
284 myGraph.setXTitle(Xlabelstr)
285 myGraph.setYTitle(Ylabelstr)
286 myGraph.draw(mytitle,640,480,GraphImplementation.PDF)
287 #ViewImage(myGraph.getBitmap())
288 print 'Input/Output pairwise scatterplot done...'
290 # An histogram of the inputs
291 for i in range(inputDim):
292 inputSamplei=inputSample.getMarginal(i)
293 myGraph = VisualTest.DrawHistogram(inputSamplei)
294 labelarray=inputSamplei.getDescription()
295 labelstr=labelarray[0]
296 myGraph.setTitle(labelstr)
297 myGraph.setName(labelstr)
298 myGraph.setXTitle(labelstr)
299 myGraph.setYTitle("Frequency")
300 myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
301 #View(myGraph.getBitmap())
302 print 'Input histogram done...'
304 # An histogram of the outputs
305 for j in range(outputDim):
306 outputSamplej=outputSampleAll.getMarginal(j)
307 myGraph = VisualTest.DrawHistogram(outputSamplej)
308 labelarray=outputSamplej.getDescription()
309 labelstr=labelarray[0]
310 myGraph.setTitle(labelstr)
311 myGraph.setName(labelstr)
312 myGraph.setXTitle(labelstr)
313 myGraph.setYTitle("Frequency")
314 myGraph.draw(labelstr,640,480,GraphImplementation.PDF)
315 #View(myGraph.getBitmap())
316 print 'Output histogram done'
317 print 'Graphical output terminated'
\r
# Parse the renewables configuration file (one ";"-separated row per unit) and
# classify each row by its leading type tag into PV / wind type 1 / wind
# type 2 / interconnection lists.  Each appended entry is
# [bus number, row index - 1, int(field 3)] -- the meaning of field 3 is not
# visible here; confirm against the file format.
# NOTE(review): original lines 320-323 (presumably the docstring and the
# PV/Wind1/Wind2/Interco list initialisations) and 336-337 are missing from
# this extraction.
319 def config_ENR(path_config_ENR) :
324 f=open(path_config_ENR,"r")
325 lines=f.readlines()
326 for i in range (len(lines)) :
327 line = lines[i].split(";")
328 if str(line[0]).upper() == 'PV' :
329 PV.append([int(line[1]),i-1,int(line[3])])
330 elif str(line[0]).upper() == 'W1' :
331 Wind1.append([int(line[1]),i-1,int(line[3])])
332 elif str(line[0]).upper() == 'W2' :
333 Wind2.append([int(line[1]),i-1,int(line[3])])
334 elif str(line[0]).upper() == 'I' :
335 Interco.append([int(line[1]),i-1,int(line[3])])
338 return PV, Wind1, Wind2, Interco
\r
# Load line and group (generator) contingency configurations plus their
# probabilities from the two files referenced by path_config_contin.
# NOTE(review): the "try" statements matching the two "except ValueError"
# clauses below, the except bodies, and the initialisations of lines_con,
# groups_con, val and prob are all in lines missing from this extraction.
340 def config_contingency(path_config_contin) :
343 # Loading of lines contingency configuration
344 f=open(path_config_contin[0],"r")
345 lines=f.readlines()
347 for i in range (len(lines)) :
348 line=lines[i].split(";")
351 except ValueError :
# Entry: [from bus, to bus, branch id, probability] with the probability's
# comma decimal separator normalised to a point.
356 lines_con.append([int(line[1]), int(line[3]), str(line[5]),float(line[0].replace(',','.'))])
358 # Loading of groups contingency configuration
359 f=open(path_config_contin[1],"r")
360 lines=f.readlines()
362 for i in range (len(lines)) :
363 line=lines[i].split(";")
366 except ValueError :
# Entry: [bus, machine id, probability].
371 groups_con.append([int(line[1]), int(line[3]),float(line[0].replace(',','.'))])
373 sizeLines = len(lines_con)
374 sizeGroups = len(groups_con)
# val is presumably filled inside this loop; its body (original 378-379) is
# missing from this extraction.
377 for i in range(sizeLines+sizeGroups) :
380 for i in range (sizeLines) :
381 prob.append(lines_con[i][3])
382 for i in range (sizeGroups) :
383 prob.append(groups_con[i][2])
385 return lines_con, groups_con, val, prob
\r
# Fit an ARMA model to the time series stored in time_serie_file (one value
# per line), draw one realization to PDF, then predict the next time_serie_TH
# instants and min-max normalise the prediction before returning it.
# NOTE(review): N, p, q, d, h and the Xt list are used below but defined in
# lines missing from this extraction -- confirm against the full source.
387 def LoadARMA(time_serie_file, time_serie_SS, time_serie_TH) :
388 f=open(time_serie_file,"r")
389 lines=f.readlines()
392 for i in range(N) :
393 Xt.append([float(lines[i])])
# Regular time grid starting at 0 with step time_serie_SS (sample step).
395 myTG=RegularGrid(0,float(time_serie_SS),N)
396 TS=TimeSeries(myTG,NumericalSample(Xt))
397 myWN=WhiteNoise(Distribution(Normal(0,1)),myTG)
398 myState=ARMAState(TS.getSample(),NumericalSample())
# Maximum-likelihood ARMA(p, q) fit in dimension d.
402 myFactory = ARMALikelihoodFactory ( p , q , d )
403 myARMA = myFactory.build(TS)
405 myARMA.setState(myState)
407 AR = myARMA.getARCoefficients()
408 MA = myARMA.getMACoefficients()
410 ts = myARMA.getRealization()
411 ts.setName('A realization')
412 myTSGraph=ts.drawMarginal(0)
413 myTSGraph.draw('Realization'+str(p)+","+str(q),640,480,GraphImplementation.PDF)
414 myARMAState=myARMA.getState()
416 #Make a prediction of the future on next Nit instants
417 Nit = int(time_serie_TH)
418 myARMA2=ARMA(AR,MA,myWN,myARMAState)
419 possibleFuture=myARMA2.getFuture(Nit)
420 possibleFuture.setName('Possible future')
423 for i in range (len(possibleFuture)):
424 Xt2.append(possibleFuture.getValueAtIndex(i)[0])
425 Max=float(max(Xt2))
426 Min=float(min(Xt2))
# Min-max normalisation shifted by h/3, presumably to avoid exact zeros.
428 for i in range (len(possibleFuture)):
429 value= (Xt2[i]-Min+h/3)/(Max-Min+h/3)
430 possibleFuture.setValueAtIndex(i,NumericalPoint(1,value))
432 myFG=possibleFuture.drawMarginal(0)
433 myFG.draw('Future'+str(Nit),640,480,GraphImplementation.PDF)
435 return possibleFuture
\r
# Load one or more time-series files into Xt, normalising comma decimal
# separators on the fly.
# NOTE(review): the body of the "== -1" branch, the definition of N, the "try"
# matching the "except ValueError" below, and the return statement are all in
# lines missing from this extraction.
437 def LoadTS(time_serie_file) :
439 for i in range(len(time_serie_file)) :
# -1 acts as a sentinel for "no file for this series".
440 if time_serie_file[i] == -1 :
443 f=open(time_serie_file[i],"r")
444 lines=f.readlines()
447 for j in range(N) :
450 except ValueError :
# NOTE(review): indexing lines[i] while the inner loop runs on j looks like a
# bug in the original (compare KSDist, which uses lines[i] consistently) --
# confirm against the full source before relying on this.
451 lines[i] = commaToPoint(lines[i])
454 Xt.append([float(lines[j])])
\r
# Build a kernel-smoothing distribution from the sample stored in filename
# (one value per line), tolerating comma decimal separators.
# NOTE(review): the definitions of N and Xt, the "try" matching the
# "except ValueError" below, and the final "return KS" are in lines missing
# from this extraction.
459 def KSDist(filename) :
460 f=open(filename,"r")
461 print "Creating Kernel Smoothing distribution from: "+str(filename)
462 lines=f.readlines()
465 for i in range(N) :
466 if lines[i] == "\n" :
467 print "End of file"
472 except ValueError :
473 lines[i] = commaToPoint(lines[i])
476 Xt.append([float(lines[i])])
477 NS=NumericalSample(Xt)
# Uniform kernel with Silverman's-rule bandwidth; last argument 1 presumably
# enables boundary correction -- confirm against the OpenTURNS version used.
478 kernel=KernelSmoothing(Uniform())
479 myBandwith = kernel.computeSilvermanBandwidth(NS)
480 KS=kernel.build(NS,myBandwith,1)
\r
# Taylor-expansion (QuadraticCumul) moment analysis of the output variable of
# interest: prints the first/second order means, the standard deviation, how
# many pssefun model evaluations each step cost, and the importance factors of
# each input.
483 def threshold (inputRandomVector, outputVariableOfInterest,pssefun,inputDistribution) :
484 # We create a quadraticCumul algorithm
485 myQuadraticCumul = QuadraticCumul(outputVariableOfInterest)
487 # We compute the several elements provided by the quadratic cumul algorithm
488 # and evaluate the number of calculus needed
489 nbBefr = pssefun.getEvaluationCallsNumber()
# First order mean (the section-header comment lines are missing here).
492 meanFirstOrder = myQuadraticCumul.getMeanFirstOrder()[0]
493 nbAfter1 = pssefun.getEvaluationCallsNumber()
495 # Mean second order
496 meanSecondOrder = myQuadraticCumul.getMeanSecondOrder()[0]
497 nbAfter2 = pssefun.getEvaluationCallsNumber()
499 # Standard deviation
500 stdDeviation = sqrt(myQuadraticCumul.getCovariance()[0,0])
501 nbAfter3 = pssefun.getEvaluationCallsNumber()
503 print "First order mean=", myQuadraticCumul.getMeanFirstOrder()[0]
504 print "Evaluation calls number = ", nbAfter1 - nbBefr
505 print "Second order mean=", myQuadraticCumul.getMeanSecondOrder()[0]
506 print "Evaluation calls number = ", nbAfter2 - nbAfter1
507 print "Standard deviation=", sqrt(myQuadraticCumul.getCovariance()[0,0])
508 print "Evaluation calls number = ", nbAfter3 - nbAfter2
510 print "Importance factors="
511 for i in range(inputRandomVector.getDimension()) :
512 print inputDistribution.getDescription()[i], " = ", myQuadraticCumul.getImportanceFactors()[i]
\r
# Build an OpenTURNS UserDefined distribution from "val" and "prob", which may
# arrive either as comma-separated strings or as sequences (the AttributeError
# path handles non-string inputs).
# NOTE(review): the "try" matching "except AttributeError", the except body,
# the definition of dim, and the "coll.add(UDpair)" call are in lines missing
# from this extraction.
515 def getUserDefined (val, prob):
517 val = val.split(',')
518 prob = prob.split(',')
519 except AttributeError :
522 coll = UserDefinedPairCollection()
523 for i in range (dim) :
524 UDpair=UserDefinedPair(NumericalPoint(1,float(val[i])),float(prob[i]))
526 return UserDefined(coll)
\r
# Build an OpenTURNS HistogramPairCollection from "step" and "prob", which may
# arrive either as comma-separated strings or as sequences (the AttributeError
# path handles non-string inputs).
# NOTE(review): the "try" matching "except AttributeError", the except body,
# the definition of dim, and the return statement are in lines missing from
# this extraction.
528 def getHistogram (step, prob) :
530 step = step.split(',')
531 prob = prob.split(',')
532 except AttributeError :
535 myHistogram = HistogramPairCollection(dim)
536 for i in range (dim) :
537 myHistogram[i]=HistogramPair(float(step[i]),float(prob[i]))
\r
# Map a numeric law code (description[0]) to an OpenTURNS distribution:
# 1=Normal, 2=Uniform, 3=Exponential, 4=Weibull, 5=TruncatedNormal,
# 6=UserDefined, 7=Histogram, 10=kernel smoothing from file, 20=time series
# (placeholder Uniform law plus the file reference returned alongside).
# NOTE(review): the "law_num == 1" branch header and the assignments of the
# time_serie flag returned below (original lines 542-546, 564, 568) are in
# lines missing from this extraction.
540 def getUserLaw (description) :
541 law_num=int(description[0])
547 law=Normal(float(description[1]),float(description[2]))
548 elif law_num == 2 :
549 law=Uniform(float(description[1]),float(description[2]))
550 elif law_num == 3 :
551 law=Exponential(float(description[1]),float(description[2]))
552 elif law_num == 4 :
553 law=Weibull(float(description[1]),float(description[2]),float(description[3]))
554 elif law_num == 5 :
555 law=TruncatedNormal(float(description[1]),float(description[2]),float(description[3]),float(description[4]))
556 elif law_num == 6 :
557 law=UserDefined(getUserDefined (description[1], description[2]))
558 elif law_num == 7 :
559 law=Histogram(0.0, getHistogram (description[1], description[2]))
560 elif law_num == 10 :
561 law=KSDist(description[1])
562 elif law_num == 20 :
# Near-degenerate placeholder law; the actual values come from the file.
563 law = Uniform(0.999999,1)
565 time_serie_file=description[1]
566 """time_serie_SS=description[2]
567 time_serie_TH=description[3]"""
569 law = Uniform(0.999999,1)
570 return law, [time_serie, time_serie_file] #[time_serie, time_serie_file, time_serie_SS, time_serie_TH]
\r
# Run a PSSE AC contingency calculation (ACCC) from the given .dfx/.acc files
# and build per-contingency branch loading ratios (amp flow / selected rating).
# NOTE(review): contingency, rA, t and contin_load are used below but defined
# in lines missing from this extraction, and the return statement is missing.
572 def contingency_automatic (dfxPath, acccPath, rate) :
573 psspy.accc_with_dsp_3( 0.5,[0,0,0,1,1,2,0,0,0,0,0],r"""ALL""",dfxPath,acccPath,"","","")
574 psspy.accc_single_run_report_4([1,int(rate),int(rate),1,1,0,1,0,0,0,0,0],[0,0,0,0,6000],[ 0.5, 5.0, 100.0,0.0,0.0,0.0, 99999.],acccPath)
576 rslt_summary=pssarrays.accc_summary(acccPath)
# Select the branch rating set (A/B/C) matching the requested rate code.
577 if int(rate) == 1 :
578 rate = rslt_summary.rating.a
579 elif int(rate) == 2 :
580 rate = rslt_summary.rating.b
581 elif int(rate) == 3 :
582 rate = rslt_summary.rating.c
584 print "NO RATE CHOOSEN"
# NOTE(review): "rlst" below is probably a typo for "rslt_summary" -- cannot
# be fixed safely here because surrounding lines are missing; confirm.
586 Labels=rlst.colabel
588 for label in Labels :
590 rslt=pssarrays.accc_solution(acccPath,contingency,label,0.5,5.0)
591 ampFlow=rslt.ampflow
592 for i in range (len(rA)) :
593 t.append(ampFlow[i]/rate[i])
594 contin_load.append(t)
\r
def commaToPoint (string) :
    """Return a copy of *string* with every comma replaced by a point.

    Used throughout this module to normalise decimal separators
    (e.g. "0,5" -> "0.5") before float() conversion.
    """
    return string.replace(',', '.')