-
Notifications
You must be signed in to change notification settings - Fork 466
/
auxiliary.py
1583 lines (1313 loc) · 62.4 KB
/
auxiliary.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2024 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
# Additional copyright for modified code by Brendan Curran-Johnson (ADict class):
# Copyright (c) 2013 Brendan Curran-Johnson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# (https://github.com/bcj/AttrDict/blob/master/LICENSE.txt)
import copy
from collections.abc import MutableMapping
import warnings
from importlib.metadata import version as version_str
from importlib.metadata import PackageNotFoundError
import numpy as np
import pandas as pd
import scipy as sp
import numbers
from packaging.version import Version
from pandapower.pypower.idx_brch import F_BUS, T_BUS, BR_STATUS
from pandapower.pypower.idx_bus import BUS_I, BUS_TYPE, NONE, PD, QD, VM, VA, REF, VMIN, VMAX, PV
from pandapower.pypower.idx_gen import PMIN, PMAX, QMIN, QMAX
from pandapower.pypower.idx_ssc import SSC_STATUS, SSC_BUS, SSC_INTERNAL_BUS
from pandapower.pypower.idx_tcsc import TCSC_STATUS, TCSC_F_BUS, TCSC_T_BUS
try:
from lightsim2grid.newtonpf import newtonpf_new as newtonpf_ls
lightsim2grid_available = True
except ImportError:
lightsim2grid_available = False
try:
import pandaplan.core.pplog as logging
except ImportError:
import logging
logger = logging.getLogger(__name__)
def log_to_level(msg, passed_logger, level):
    """Dispatch *msg* to *passed_logger* at the requested severity.

    level: one of "error"/"warning"/"info"/"debug" (logged via the matching
    logger method), "UserWarning" (raised as a UserWarning instead of being
    logged), or None (the message is dropped). Unknown levels are ignored
    silently.
    """
    if level == "UserWarning":
        raise UserWarning(msg)
    if level in ("error", "warning", "info", "debug"):
        getattr(passed_logger, level)(msg)
def version_check(package_name, level="UserWarning", ignore_not_installed=False):
    """Check that an installed package satisfies pandapower's minimum version.

    Parameters
    ----------
    package_name : str
        name of the package to check
    level : str, optional
        how an outdated version is reported (see log_to_level); the default
        raises a UserWarning
    ignore_not_installed : bool, optional
        if True, a missing package is tolerated silently; packages without a
        known minimum version are then skipped entirely

    Raises
    ------
    PackageNotFoundError
        if the package is not installed and ignore_not_installed is False
    """
    minimum_version = {'plotly': "3.1.1",
                       'numba': "0.25",
                       }
    if ignore_not_installed and package_name not in minimum_version.keys():
        return
    try:
        version = version_str(package_name)
        if Version(version) < Version(minimum_version.get(package_name, '0.0.0')):
            log_to_level((
                f"{package_name} version {version} is no longer supported by pandapower.\r\n"
                f"Please upgrade your installation. Possibly it can be done via "
                f"'pip install --upgrade {package_name}'."), logger, level)
    except PackageNotFoundError:
        # bugfix: the condition was inverted before - a missing package was
        # escalated exactly when the caller asked to IGNORE it
        if not ignore_not_installed:
            raise PackageNotFoundError(
                f"Python package '{package_name}', is needed.\r\nPlease install it. "
                f"Possibly it can be installed via 'pip install {package_name}'.")
# --- numba availability ----------------------------------------------------
# Try to import numba's jit decorator; fall back to the dummy no-op decorator
# from pf.no_numba if numba is missing or too old, so the rest of the module
# can always apply @jit unconditionally.
try:
    from numba import jit
    try:
        version_check("numba")
        NUMBA_INSTALLED = True
    except UserWarning:
        # version_check raises UserWarning (default level) for outdated numba
        msg = 'The numba version is too old.\n'
        log_to_level(msg, logger, 'warning')
        NUMBA_INSTALLED = False
except ImportError:
    # numba not installed: use the no-op jit replacement (no compilation)
    from .pf.no_numba import jit
    NUMBA_INSTALLED = False
def soft_dependency_error(fct_name, required_packages):
    """Raise an ImportError naming the optional package(s) missing for the
    requested functionality.

    fct_name: name/description of the function the user called
    required_packages: a single package name or an iterable of names
    """
    if not isinstance(required_packages, str):
        required_packages = "','".join(required_packages)
    raise ImportError(
        "Some pandapower functionality use modules outside the setup.py "
        f"requirements: {fct_name} requires '{required_packages}'. \n"
        f"{required_packages} could not be imported.\n"
        'To install all pandapower dependencies, pip install pandapower["all"] can be used.')
def warn_and_fix_parameter_renaming(old_parameter_name, new_parameter_name, new_parameter,
                                    default_value, category=DeprecationWarning, **kwargs):
    """Handle a renamed keyword argument in a deprecation-friendly way.

    If the caller passed the old keyword (present in kwargs), a deprecation
    warning is emitted. The old value is only used (and popped from kwargs)
    when the new parameter still equals its default - an explicitly passed
    new parameter always wins.

    Returns the effective parameter value.
    """
    if old_parameter_name in kwargs:
        # fix: the message was built with "%"-formatting applied to an
        # f-string; use a plain f-string (resulting text is unchanged)
        warnings.warn(f"Parameter '{old_parameter_name}' has been renamed to "
                      f"'{new_parameter_name}'.", category=category)
        if new_parameter == default_value:
            return kwargs.pop(old_parameter_name)
    return new_parameter
class ADict(dict, MutableMapping):
    """Dict subclass whose items are additionally reachable as attributes
    (``d.key`` <-> ``d["key"]``), adapted from Brendan Curran-Johnson's
    AttrDict (see license header at the top of this file).

    A key is only exposed as an attribute if it is a string and does not
    shadow anything defined on the class itself (see _valid_name). True
    internal attributes are stored via _setattr so they never collide with
    mapping keys.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # to prevent overwrite of internal attributes by new keys
        # see _valid_name()
        self._setattr('_allow_invalid_attributes', False)
    def _build(self, obj, **kwargs):
        """
        We only want dict like elements to be treated as recursive AttrDicts.
        """
        return obj
    # --- taken from AttrDict
    def __getstate__(self):
        # pickle support: the plain mapping plus the internal flag
        return self.copy(), self._allow_invalid_attributes
    def __dir__(self):
        # expose the mapping keys, e.g. for tab completion
        return list(self.keys())
    def __setstate__(self, state):
        # counterpart of __getstate__: restore mapping and internal flag
        mapping, allow_invalid_attributes = state
        self.update(mapping)
        self._setattr('_allow_invalid_attributes', allow_invalid_attributes)
    @classmethod
    def _constructor(cls, mapping):
        # alternate constructor used when rebuilding instances from a mapping
        return cls(mapping)
    # --- taken from MutableAttr
    def _setattr(self, key, value):
        """
        Add an attribute to the object, without attempting to add it as
        a key to the mapping (i.e. internals)
        """
        super(MutableMapping, self).__setattr__(key, value)
    def __setattr__(self, key, value):
        """
        Add an attribute.
        key: The name of the attribute
        value: The attributes contents
        """
        if self._valid_name(key):
            # valid attribute names are stored as mapping items
            self[key] = value
        elif getattr(self, '_allow_invalid_attributes', True):
            super(MutableMapping, self).__setattr__(key, value)
        else:
            raise TypeError(
                "'{cls}' does not allow attribute creation.".format(
                    cls=self.__class__.__name__
                )
            )
    def _delattr(self, key):
        """
        Delete an attribute from the object, without attempting to
        remove it from the mapping (i.e. internals)
        """
        super(MutableMapping, self).__delattr__(key)
    def __delattr__(self, key, force=False):
        """
        Delete an attribute.
        key: The name of the attribute
        """
        if self._valid_name(key):
            # valid attribute names are removed from the mapping
            del self[key]
        elif getattr(self, '_allow_invalid_attributes', True):
            super(MutableMapping, self).__delattr__(key)
        else:
            raise TypeError(
                "'{cls}' does not allow attribute deletion.".format(
                    cls=self.__class__.__name__
                )
            )
    def __call__(self, key):
        """
        Dynamically access a key-value pair.
        key: A key associated with a value in the mapping.
        This differs from __getitem__, because it returns a new instance
        of an Attr (if the value is a Mapping object).
        """
        if key not in self:
            raise AttributeError(
                "'{cls} instance has no attribute '{name}'".format(
                    cls=self.__class__.__name__, name=key
                )
            )
        return self._build(self[key])
    def __getattr__(self, key):
        """
        Access an item as an attribute.
        """
        # only called when normal attribute lookup fails; map attribute
        # access onto the dict item of the same name
        if key not in self or not self._valid_name(key):
            raise AttributeError(
                "'{cls}' instance has no attribute '{name}'".format(
                    cls=self.__class__.__name__, name=key
                )
            )
        return self._build(self[key])
    def __deepcopy__(self, memo):
        """
        overloads the deepcopy function of pandapower if at least one DataFrame with column
        "object" is in net
        in addition, line geodata can contain mutable objects like lists, and it is also treated
        specially
        reason: some of these objects contain a reference to net which breaks the default deepcopy
        function. Also, the DataFrame doesn't deepcopy its elements if geodata changes in the
        lists, it affects both net instances
        This fix was introduced in pandapower 2.2.1
        """
        # columns whose cells hold mutable objects that need per-cell deepcopy
        deep_columns = {'object', 'coords', 'geometry'}
        cls = self.__class__
        result = cls.__new__(cls)
        # register early so self-references in the memo resolve to the copy
        memo[id(self)] = result
        for k, v in self.items():
            if isinstance(v, pd.DataFrame) and not set(v.columns).isdisjoint(deep_columns):
                if k not in result:
                    # empty frame with same index/columns, filled column-wise
                    result[k] = v.__class__(index=v.index, columns=v.columns)
                for col in v.columns:
                    if col in deep_columns:
                        # deepcopy each cell individually (shared memo)
                        result[k][col] = v[col].apply(lambda x: copy.deepcopy(x, memo))
                    else:
                        result[k][col] = copy.deepcopy(v[col], memo)
                # column-wise assignment may have upcast dtypes; restore them
                _preserve_dtypes(result[k], v.dtypes)
            else:
                setattr(result, k, copy.deepcopy(v, memo))
        result._setattr('_allow_invalid_attributes', self._allow_invalid_attributes)
        return result
    @classmethod
    def _valid_name(cls, key):
        """
        Check whether a key is a valid attribute name.
        A key may be used as an attribute if:
        * It is a string
        * The key doesn't overlap with any class attributes (for Attr,
        those would be 'get', 'items', 'keys', 'values', 'mro', and
        'register').
        """
        return (
            isinstance(key, str) and
            not hasattr(cls, key)
        )
class pandapowerNet(ADict):
    """Attribute-style dict holding a pandapower network: element tables
    (pandas DataFrames), result tables and assorted metadata."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # copy constructor: pandapowerNet(other_net) becomes a deep copy.
        # bugfix/robustness: guard args access so pandapowerNet() without
        # positional arguments no longer raises IndexError
        if len(args) > 0 and isinstance(args[0], self.__class__):
            net = args[0]
            self.clear()
            self.update(**net.deepcopy())
        # list values are numpy dtype specifications -> materialize them as
        # empty, correctly typed DataFrames with an int64 index
        for key in self:
            if isinstance(self[key], list):
                self[key] = pd.DataFrame(np.zeros(0, dtype=self[key]), index=pd.Index([],
                                                                                      dtype=np.int64))

    def deepcopy(self):
        """Return a deep copy of the network (uses ADict.__deepcopy__)."""
        return copy.deepcopy(self)

    def __repr__(self):  # pragma: no cover
        """
        Human-readable summary of all non-empty element and result tables.

        See Also
        --------
        count_elements
        """
        par = []
        res = []
        for et in list(self.keys()):
            # only public (no leading "_"), non-empty DataFrame entries
            if not et.startswith("_") and isinstance(self[et], pd.DataFrame) and len(self[et]) > 0:
                n_rows = self[et].shape[0]
                if 'res_' in et:
                    res.append(" - %s (%i %s)" % (et, n_rows, "element" + plural_s(n_rows)))
                elif et == 'group':
                    # groups can span several rows; report both counts
                    n_groups = len(set(self[et].index))
                    par.append(' - %s (%i %s, %i %s)' % (
                        et, n_groups, "group" + plural_s(n_groups), n_rows, "row" + plural_s(n_rows)))
                else:
                    par.append(" - %s (%i %s)" % (et, n_rows, "element" + plural_s(n_rows)))
        res_cost = [" and the following result values:",
                    " - %s" % "res_cost"] if "res_cost" in self.keys() else []
        if not len(par) + len(res):
            return "This pandapower network is empty"
        if len(res):
            res = [" and the following results tables:"] + res
        lines = ["This pandapower network includes the following parameter tables:"] + \
            par + res + res_cost
        return "\n".join(lines)
def plural_s(number):
    """Return "s" when *number* calls for a plural noun (number > 1), else ""."""
    return "s" if number > 1 else ""
def ets_to_element_types(ets=None):
    """Translate short element type strings ("b", "l", ...) into full element
    table names ("bus", "line", ...).

    ets: None -> return the complete mapping as a pandas Series;
         str  -> return the single translated name;
         iterable of str -> return a list of translated names.
    """
    mapping = pd.Series(["bus", "line", "trafo", "trafo3w", "impedance"],
                        index=["b", "l", "t", "t3", "i"])
    if ets is None:
        return mapping
    if isinstance(ets, str):
        return mapping.at[ets]
    return list(mapping.loc[ets])
def element_types_to_ets(element_types=None):
    """Inverse of ets_to_element_types: translate full element table names
    ("bus", "line", ...) into their short forms ("b", "l", ...).

    element_types: None -> return the complete reverse mapping as a Series;
                   str  -> return the single short form;
                   iterable of str -> return a list of short forms.
    """
    ser1 = ets_to_element_types()
    ser2 = pd.Series(ser1.index, index=list(ser1))
    if element_types is None:
        return ser2
    # bugfix: this branch previously tested `isinstance(ets, str)` - `ets` is
    # not defined in this function, so every non-None call raised NameError
    elif isinstance(element_types, str):
        return ser2.at[element_types]
    else:
        return list(ser2.loc[element_types])
def _preserve_dtypes(df, dtypes):
for item, dtype in list(dtypes.items()):
if df.dtypes.at[item] != dtype:
if (dtype == bool or dtype == np.bool_) and np.any(df[item].isnull()):
raise UserWarning(f"Encountered NaN value(s) in a boolean column {item}! "
f"NaN are casted to True by default, which can lead to errors. "
f"Replace NaN values with True or False first.")
try:
df[item] = df[item].astype(dtype)
except ValueError:
df[item] = df[item].astype(float)
def get_free_id(df):
    """
    Returns next free ID in a dataframe
    """
    if len(df) == 0:
        return np.int64(0)
    # for a MultiIndex only the first level carries the element id
    if isinstance(df.index, pd.MultiIndex):
        return df.index.get_level_values(0).max() + 1
    return df.index.values.max() + 1
class ppException(Exception):
    """
    General pandapower custom parent exception.

    pandapower-specific exceptions should derive from this class so callers
    can catch them with a single ``except ppException`` clause.
    """
    pass
def _sum_by_group(bus, first_val, second_val):
order = np.argsort(bus)
bus = bus[order]
index = np.ones(len(bus), 'bool')
index[:-1] = bus[1:] != bus[:-1]
bus = bus[index]
first_val = first_val[order]
first_val.cumsum(out=first_val)
first_val = first_val[index]
first_val[1:] = first_val[1:] - first_val[:-1]
second_val = second_val[order]
second_val.cumsum(out=second_val)
second_val = second_val[index]
second_val[1:] = second_val[1:] - second_val[:-1]
return bus, first_val, second_val
def _sum_by_group_nvals(bus, *vals):
order = np.argsort(bus)
bus = bus[order]
index = np.ones(len(bus), 'bool')
index[:-1] = bus[1:] != bus[:-1]
bus = bus[index]
newvals = tuple(np.zeros((len(vals), len(bus))))
for val, newval in zip(vals, newvals):
val = val[order]
val.cumsum(out=val)
val = val[index]
val[1:] = val[1:] - val[:-1]
newval[:] = val
# Returning vals keeps the original array dimensions, which causes an error if more than one element is
# connected to the same bus. Instead, we create a second tuple of arrays on which we map the results.
# Todo: Check if this workaround causes no problems
return (bus,) + newvals
def get_indices(selection, lookup, fused_indices=True):
    """
    Helper function during pd2mpc conversion. It resolves the mapping from a
    given selection of indices to the actual indices, using a dict lookup being
    passed as well.
    :param selection: Indices we want to select
    :param lookup: The mapping itself
    :param fused_indices: Flag which way the conversion is working.
    :return: numpy int64 array of resolved indices
    """
    mapping = lookup if fused_indices else lookup["before_fuse"]
    return np.array([mapping[key] for key in selection], dtype=np.int64)
def _get_values(source, selection, lookup):
"""
Returns values for a selection of values after a lookup.
:param source: The array of values to select from.
:param selection: An array of keys, for the selection.
:param lookup: The mapping to resolve actual indices of the
value array from the selection.
:return:
"""
v = np.zeros(len(selection))
for i, k in enumerate(selection):
v[i] = source[lookup[np.int64(k)]]
return v
def ensure_iterability(var, len_=None):
    """
    Ensures iterability of a variable (and also the length if given).
    Examples
    --------
    >>> ensure_iterability([1, 2])
    [1, 2]
    >>> ensure_iterability(1)
    [1]
    >>> ensure_iterability("Hi")
    ["Hi"]
    >>> ensure_iterability([1, 2], len_=2)
    [1, 2]
    >>> ensure_iterability([1, 2], len_=3)
    ValueError("Length of variable differs from 3.")
    """
    is_iterable = hasattr(var, "__iter__") and not isinstance(var, str)
    if not is_iterable:
        # scalars (and strings) are repeated to the requested length
        return [var] * (len_ or 1)
    if isinstance(len_, int) and len(var) != len_:
        raise ValueError("Length of variable differs from %i." % len_)
    return var
def read_from_net(net, element, index, variable, flag='auto'):
    """
    Reads values from the specified element table at the specified index in the column according to the specified variable
    Chooses the method to read based on flag
    Parameters
    ----------
    net
    element : str
        element table in pandapower net; can also be a results table
    index : int or array_like
        index of the element table where values are read from
    variable : str
        column of the element table
    flag : str
        defines which underlying function to use, can be one of ['auto', 'single_index', 'all_index', 'loc', 'object']
    Returns
    -------
    values
        the values of the variable for the element table according to the index
    """
    if flag == "auto":
        # resolve the fastest access strategy first, then dispatch again
        auto_flag, auto_variable = _detect_read_write_flag(net, element, index, variable)
        return read_from_net(net, element, index, auto_variable, auto_flag)
    if flag == "single_index":
        return _read_from_single_index(net, element, variable, index)
    if flag == "all_index":
        return _read_from_all_index(net, element, variable)
    if flag == "loc":
        return _read_with_loc(net, element, variable, index)
    if flag == "object":
        return _read_from_object_attribute(net, element, variable, index)
    raise NotImplementedError("read: flag must be one of ['auto', 'single_index', 'all_index', 'loc', 'object']")
def write_to_net(net, element, index, variable, values, flag='auto'):
    """
    Writes values to the specified element table at the specified index in the column according to the specified variable
    Chooses the method to write based on flag
    Parameters
    ----------
    net
    element : str
        element table in pandapower net
    index : int or array_like
        index of the element table where values are written to
    variable : str
        column of the element table
    flag : str
        defines which underlying function to use, can be one of ['auto', 'single_index', 'all_index', 'loc', 'object']
    Returns
    -------
    None
    """
    # write functions faster, depending on type of element_index
    if flag == "auto":
        # resolve the fastest access strategy first, then dispatch again
        auto_flag, auto_variable = _detect_read_write_flag(net, element, index, variable)
        write_to_net(net, element, index, auto_variable, values, auto_flag)
    elif flag == "single_index":
        _write_to_single_index(net, element, index, variable, values)
    elif flag == "all_index":
        _write_to_all_index(net, element, variable, values)
    elif flag == "loc":
        _write_with_loc(net, element, index, variable, values)
    elif flag == "object":
        _write_to_object_attribute(net, element, index, variable, values)
    else:
        raise NotImplementedError("write: flag must be one of ['auto', 'single_index', 'all_index', 'loc', 'object']")
def _detect_read_write_flag(net, element, index, variable):
if variable.startswith('object'):
# write to object attribute
return "object", variable.split(".")[1]
elif isinstance(index, numbers.Number):
# use .at if element_index is integer for speedup
return "single_index", variable
# commenting this out for now, see issue 609
# elif net[element].index.equals(Index(index)):
# # use : indexer if all elements are in index
# return "all_index", variable
else:
# use common .loc
return "loc", variable
# read functions:
def _read_from_single_index(net, element, variable, index):
    """Fast scalar read via DataFrame.at (index must be a scalar)."""
    return net[element].at[index, variable]
def _read_from_all_index(net, element, variable):
    """Read the whole column of the element table as a numpy array."""
    return net[element].loc[:, variable].values
def _read_with_loc(net, element, variable, index):
    """Read several rows of a column via .loc; returns a numpy array."""
    return net[element].loc[index, variable].values
def _read_from_object_attribute(net, element, variable, index):
if hasattr(index, '__iter__') and len(index) > 1:
values = np.array(shape=index.shape)
for i, idx in enumerate(index):
values[i] = getattr(net[element]["object"].at[idx], variable)
else:
values = getattr(net[element]["object"].at[index], variable)
return values
# write functions:
def _write_to_single_index(net, element, index, variable, values):
    """Fast scalar write via DataFrame.at (index must be a scalar)."""
    net[element].at[index, variable] = values
def _write_to_all_index(net, element, variable, values):
    """Overwrite the whole column of the element table."""
    net[element].loc[:, variable] = values
def _write_with_loc(net, element, index, variable, values):
    """Write several rows of a column via .loc."""
    net[element].loc[index, variable] = values
def _write_to_object_attribute(net, element, index, variable, values):
if hasattr(index, '__iter__') and len(index) > 1:
for idx, val in zip(index, values):
setattr(net[element]["object"].at[idx], variable, val)
else:
setattr(net[element]["object"].at[index], variable, values)
def _set_isolated_nodes_out_of_service(ppc, bus_not_reachable):
    """Set all unreachable ppc buses out of service.

    :param ppc: pypower case file
    :param bus_not_reachable: boolean mask of buses not reachable from a slack
    :return: (isolated node indices, unsupplied P in kW, unsupplied Q in kvar,
        ppc)
    """
    isolated_nodes = np.where(bus_not_reachable)[0]
    if len(isolated_nodes) > 0:
        logger.debug("There are isolated buses in the network! (%i nodes in the PPC)"%len(isolated_nodes))
        # set buses in ppc out of service
        ppc['bus'][isolated_nodes, BUS_TYPE] = NONE
        # unsupplied demand at the isolated buses, scaled by 1e3 (logged
        # below in kW/kVar)
        pus = abs(ppc['bus'][isolated_nodes, PD] * 1e3).sum()
        qus = abs(ppc['bus'][isolated_nodes, QD] * 1e3).sum()
        if pus > 0 or qus > 0:
            logger.debug("%.0f kW active and %.0f kVar reactive power are unsupplied" % (pus, qus))
    else:
        pus = qus = 0
    return isolated_nodes, pus, qus, ppc
def _check_connectivity_opf(ppc):
    """
    Checks if the ppc contains isolated buses and changes slacks to PV nodes if multiple slacks are
    in net.
    :param ppc: pypower case file
    :return: (isolated node indices, unsupplied active power, unsupplied
        reactive power)
    """
    # adjacency matrix over in-service branches only
    br_status = ppc['branch'][:, BR_STATUS].astype(bool)
    nobranch = ppc['branch'][br_status, :].shape[0]
    nobus = ppc['bus'].shape[0]
    bus_from = ppc['branch'][br_status, F_BUS].real.astype(np.int64)
    bus_to = ppc['branch'][br_status, T_BUS].real.astype(np.int64)
    # bus type 3 marks slack (REF) buses
    slacks = ppc['bus'][ppc['bus'][:, BUS_TYPE] == 3, BUS_I].astype(np.int64)
    adj_matrix = sp.sparse.coo_matrix((np.ones(nobranch),
                                       (bus_from, bus_to)),
                                      shape=(nobus, nobus))
    bus_not_reachable = np.ones(ppc["bus"].shape[0], dtype=bool)
    slack_set = set(slacks)
    for slack in slacks:
        # skip slacks already demoted to PV in an earlier iteration
        if ppc['bus'][slack, BUS_TYPE] == PV:
            continue
        # breadth-first search from this slack (undirected)
        reachable = sp.sparse.csgraph.breadth_first_order(adj_matrix, slack, False, False)
        bus_not_reachable[reachable] = False
        reach_set = set(reachable)
        intersection = slack_set & reach_set
        if len(intersection) > 1:
            # if slack is in reachable other slacks are connected to this one. Set it to Gen bus
            demoted_slacks = list(intersection - {slack})
            ppc['bus'][demoted_slacks, BUS_TYPE] = PV
            logger.warning("Multiple connected slacks in one area found. This would probably lead "
                           "to non-convergence of the OPF. Therefore, all but one slack (ext_grid)"
                           " were changed to gens. To avoid undesired behaviour, rather convert the"
                           " slacks to gens yourself and set slack=True for only one of them.")
    isolated_nodes, pus, qus, ppc = _set_isolated_nodes_out_of_service(ppc, bus_not_reachable)
    return isolated_nodes, pus, qus
def _check_connectivity(ppc):
    """
    Checks if the ppc contains isolated buses. If yes this isolated buses are set out of service
    :param ppc: pypower case file
    :return: (isolated node indices, unsupplied active power, unsupplied
        reactive power)
    """
    # in-service branches only
    br_status = ppc['branch'][:, BR_STATUS].astype(bool)
    nobranch = ppc['branch'][br_status, :].shape[0]
    nobus = ppc['bus'].shape[0]
    bus_from = ppc['branch'][br_status, F_BUS].real.astype(np.int64)
    bus_to = ppc['branch'][br_status, T_BUS].real.astype(np.int64)
    # bus type 3 marks slack (REF) buses
    slacks = ppc['bus'][ppc['bus'][:, BUS_TYPE] == 3, BUS_I]
    # FACTS devices (TCSC, SSC) also connect buses and count as edges
    tcsc_status = ppc["tcsc"][:, TCSC_STATUS].real.astype(bool)
    notcsc = ppc["tcsc"][tcsc_status, :].shape[0]
    bus_from_tcsc = ppc["tcsc"][tcsc_status, TCSC_F_BUS].real.astype(np.int64)
    bus_to_tcsc = ppc["tcsc"][tcsc_status, TCSC_T_BUS].real.astype(np.int64)
    ssc_status = ppc["ssc"][:, SSC_STATUS].real.astype(bool)
    nossc = ppc["ssc"][ssc_status, :].shape[0]
    bus_from_ssc = ppc["ssc"][ssc_status, SSC_BUS].real.astype(np.int64)
    bus_to_ssc = ppc["ssc"][ssc_status, SSC_INTERNAL_BUS].real.astype(np.int64)
    # we create a "virtual" bus thats connected to all slack nodes and start the connectivity
    # search at this bus
    bus_from = np.hstack([bus_from, bus_from_tcsc, bus_from_ssc, slacks])
    bus_to = np.hstack([bus_to, bus_to_tcsc, bus_to_ssc, np.ones(len(slacks)) * nobus])
    adj_matrix = sp.sparse.coo_matrix((np.ones(nobranch + notcsc + nossc + len(slacks)),
                                       (bus_from, bus_to)),
                                      shape=(nobus + 1, nobus + 1))
    # single BFS from the virtual bus (index nobus) covers all slack areas
    reachable = sp.sparse.csgraph.breadth_first_order(adj_matrix, nobus, False, False)
    # TODO: the former impl. excluded ppc buses that are already oos, but is this necessary ?
    # if so: bus_not_reachable = np.hstack([ppc['bus'][:, BUS_TYPE] != 4, np.array([False])])
    bus_not_reachable = np.ones(ppc["bus"].shape[0] + 1, dtype=bool)
    bus_not_reachable[reachable] = False
    isolated_nodes, pus, qus, ppc = _set_isolated_nodes_out_of_service(ppc, bus_not_reachable)
    return isolated_nodes, pus, qus
def _subnetworks(ppc):
    """
    Return a list of lists of the connected buses of the network
    :param ppc: pypower case file
    :return: one list of bus indices per connected area containing a slack
    """
    br_status = ppc['branch'][:, BR_STATUS].astype(bool)
    oos_bus = ppc['bus'][:, BUS_TYPE] == NONE
    nobranch = ppc['branch'][br_status, :].shape[0]
    nobus = ppc['bus'].shape[0]
    bus_from = ppc['branch'][br_status, F_BUS].real.astype(np.int64)
    bus_to = ppc['branch'][br_status, T_BUS].real.astype(np.int64)
    # Note BUS_TYPE is never REF when the generator is out of service.
    slacks = ppc['bus'][ppc['bus'][:, BUS_TYPE] == REF, BUS_I]
    adj_matrix = sp.sparse.csr_matrix((np.ones(nobranch), (bus_from, bus_to)),
                                      shape=(nobus, nobus))
    # Set out of service buses to have no connections (*=0 instead of =0 to avoid sparcity warning).
    adj_matrix[oos_bus, :] *= 0
    adj_matrix[:, oos_bus] *= 0
    traversed_buses = set()
    subnets = []
    for slack in slacks:
        # a slack inside an already traversed area starts no new subnet
        if slack in traversed_buses:
            continue
        # undirected BFS collects the whole connected area of this slack
        reachable = sp.sparse.csgraph.breadth_first_order(
            adj_matrix, slack, directed=False, return_predecessors=False)
        traversed_buses |= set(reachable)
        subnets.append(list(reachable))
    return subnets
def _python_set_elements_oos(ti, tis, bis, lis): # pragma: no cover
    """Set lis[i] True for every element that is in service itself (tis) AND
    whose bus is in service (bis); ti maps elements to bus indices.
    Kept as a plain index loop so it can be numba-jitted below."""
    for i in range(len(ti)):
        if tis[i] and bis[ti[i]]:
            lis[i] = True
def _python_set_isolated_buses_oos(bus_in_service, ppc_bus_isolated,
                                   bus_lookup): # pragma: no cover
    """Clear the in_service flag of every pandapower bus whose mapped ppc bus
    (via bus_lookup) is isolated. Kept as a plain index loop so it can be
    numba-jitted below."""
    for k in range(len(bus_in_service)):
        if ppc_bus_isolated[bus_lookup[k]]:
            bus_in_service[k] = False
# compile the hot helper functions with numba (jit is a no-op stand-in when
# numba is unavailable, see the import block above); writing the compilation
# cache to disk can fail (e.g. read-only installation), hence the fallback
# without caching
try:
    get_values = jit(nopython=True, cache=True)(_get_values)
    set_elements_oos = jit(nopython=True, cache=True)(_python_set_elements_oos)
    set_isolated_buses_oos = jit(nopython=True, cache=True)(_python_set_isolated_buses_oos)
except RuntimeError:
    get_values = jit(nopython=True, cache=False)(_get_values)
    set_elements_oos = jit(nopython=True, cache=False)(_python_set_elements_oos)
    set_isolated_buses_oos = jit(nopython=True, cache=False)(_python_set_isolated_buses_oos)
def _select_is_elements_numba(net, isolated_nodes=None, sequence=None):
    """Build a dict of boolean in-service masks (plus bus/line index arrays)
    for the relevant element tables, combining each element's own in_service
    flag with the in_service state of its bus; buses isolated in the ppc are
    excluded as well.

    In OPF mode, controllable load/sgen/storage rows are reported separately
    under "<element>_controllable" and removed from the plain mask.
    """
    # is missing sgen_controllable and load_controllable
    max_bus_idx = np.max(net["bus"].index.values)
    bus_in_service = np.zeros(max_bus_idx + 1, dtype=bool)
    bus_in_service[net["bus"].index.values] = net["bus"]["in_service"].values.astype(bool)
    if isolated_nodes is not None and len(isolated_nodes) > 0:
        # map ppc-isolated buses back to pandapower buses and switch them off
        ppc = net["_ppc"] if sequence is None else net["_ppc%s" % sequence]
        ppc_bus_isolated = np.zeros(ppc["bus"].shape[0], dtype=bool)
        ppc_bus_isolated[isolated_nodes] = True
        set_isolated_buses_oos(bus_in_service, ppc_bus_isolated, net["_pd2ppc_lookups"]["bus"])
    # mode = net["_options"]["mode"]
    elements = ["load", "motor", "sgen", "asymmetric_load", "asymmetric_sgen", "gen",
                "ward", "xward", "shunt", "ext_grid", "storage", "svc", "ssc"]  # ,"impedance_load"
    is_elements = dict()
    for element in elements:
        len_ = len(net[element].index)
        element_in_service = np.zeros(len_, dtype=bool)
        if len_ > 0:
            element_df = net[element]
            # element is in service iff its own flag AND its bus flag are set
            set_elements_oos(element_df["bus"].values, element_df["in_service"].values,
                             bus_in_service, element_in_service)
        if net["_options"]["mode"] == "opf" and element in ["load", "sgen", "storage"]:
            if "controllable" in net[element]:
                controllable = net[element].controllable.fillna(False).values.astype(bool)
                controllable_is = controllable & element_in_service
                if controllable_is.any():
                    # controllable rows get their own mask and are removed
                    # from the plain in-service mask
                    is_elements["%s_controllable" % element] = controllable_is
                    element_in_service = element_in_service & ~controllable_is
        is_elements[element] = element_in_service
    is_elements["bus_is_idx"] = net["bus"].index.values[bus_in_service[net["bus"].index.values]]
    is_elements["line_is_idx"] = net["line"].index[net["line"].in_service.values]
    return is_elements
def _add_ppc_options(net, calculate_voltage_angles, trafo_model, check_connectivity, mode,
                     switch_rx_ratio, enforce_q_lims, recycle, delta=1e-10,
                     voltage_depend_loads=False, trafo3w_losses="hv", init_vm_pu=1.0,
                     init_va_degree=0, p_lim_default=1e9, q_lim_default=1e9,
                     neglect_open_switch_branches=False, consider_line_temperature=False,
                     distributed_slack=False, tdpf=False, tdpf_update_r_theta=True, tdpf_delay_s=None):
    """
    creates dictionary for pf, opf and short circuit calculations from input parameters.

    The assembled options are merged into net._options via _add_options,
    which rejects keys that were already set.
    """
    # if recycle is None:
    #     recycle = dict(trafo=False, bus_pq=False, bfsw=False)
    # initialize from previous results if either magnitude or angle is
    # requested as "results"
    init_results = (isinstance(init_vm_pu, str) and (init_vm_pu == "results")) or \
                   (isinstance(init_va_degree, str) and (init_va_degree == "results"))
    options = {
        "calculate_voltage_angles": calculate_voltage_angles,
        "trafo_model": trafo_model,
        "check_connectivity": check_connectivity,
        "mode": mode,
        "switch_rx_ratio": switch_rx_ratio,
        "enforce_q_lims": enforce_q_lims,
        "recycle": recycle,
        "voltage_depend_loads": voltage_depend_loads,
        "consider_line_temperature": consider_line_temperature,
        "tdpf": tdpf,
        "tdpf_update_r_theta": tdpf_update_r_theta,
        "tdpf_delay_s": tdpf_delay_s,
        "distributed_slack": distributed_slack,
        "delta": delta,
        "trafo3w_losses": trafo3w_losses,
        "init_vm_pu": init_vm_pu,
        "init_va_degree": init_va_degree,
        "init_results": init_results,
        "p_lim_default": p_lim_default,
        "q_lim_default": q_lim_default,
        "neglect_open_switch_branches": neglect_open_switch_branches,
    }
    _add_options(net, options)
def _check_bus_index_and_print_warning_if_high(net, n_max=1e7):
max_bus = max(net.bus.index.values)
if max_bus >= n_max > len(net["bus"]):
logger.warning("Maximum bus index is high (%i). You should avoid high bus indices because "
"of perfomance reasons. Try resetting the bus indices with the toolbox "
"function create_continuous_bus_index()" % max_bus)
def _check_gen_index_and_print_warning_if_high(net, n_max=1e7):
if net.gen.empty:
return
max_gen = max(net.gen.index.values)
if max_gen >= n_max > len(net["gen"]):
logger.warning("Maximum generator index is high (%i). You should avoid high generator "
"indices because of perfomance reasons. Try resetting the bus indices with "
"the toolbox function create_continuous_elements_index()" % max_gen)
def _add_pf_options(net, tolerance_mva, trafo_loading, numba, ac,
                    algorithm, max_iteration, **kwargs):
    """
    Collects the power-flow specific solver settings and stores them in
    net._options via _add_options. Any additional keyword arguments are
    treated as algorithm-specific parameters and may override the base keys.
    """
    base_options = {
        "tolerance_mva": tolerance_mva,
        "trafo_loading": trafo_loading,
        "numba": numba,
        "ac": ac,
        "algorithm": algorithm,
        "max_iteration": max_iteration,
    }
    # merge algorithm-specific extras on top of the base power flow settings
    _add_options(net, {**base_options, **kwargs})
def _add_opf_options(net, trafo_loading, ac, v_debug=False, **kwargs):
    """
    Collects the OPF specific solver settings and stores them in net._options
    via _add_options. Extra keyword arguments are merged in and may override
    the base keys.
    """
    base_options = {
        "trafo_loading": trafo_loading,
        "ac": ac,
        "v_debug": v_debug,
    }
    # merge solver-specific extras on top of the base OPF settings
    _add_options(net, {**base_options, **kwargs})
def _add_sc_options(net, fault, case, lv_tol_percent, tk_s, topology, r_fault_ohm,
                    x_fault_ohm, kappa, ip, ith, branch_results,
                    kappa_method, return_all_currents,
                    inverse_y, use_pre_fault_voltage):
    """
    Collects the short-circuit specific calculation settings and stores them
    in net._options via _add_options.
    """
    _add_options(net, dict(
        fault=fault,
        case=case,
        lv_tol_percent=lv_tol_percent,
        tk_s=tk_s,
        topology=topology,
        r_fault_ohm=r_fault_ohm,
        x_fault_ohm=x_fault_ohm,
        kappa=kappa,
        ip=ip,
        ith=ith,
        branch_results=branch_results,
        kappa_method=kappa_method,
        return_all_currents=return_all_currents,
        inverse_y=inverse_y,
        use_pre_fault_voltage=use_pre_fault_voltage,
    ))
def _add_options(net, options):
# double_parameters = set(net.__internal_options.keys()) & set(options.keys())
double_parameters = set(net._options.keys()) & set(options.keys())
if len(double_parameters) > 0:
raise UserWarning(
"Parameters always have to be unique! The following parameters where specified " +
"twice: %s" % double_parameters)
# net.__internal_options.update(options)
net._options.update(options)
def _clean_up(net, res=True):
# mode = net.__internal_options["mode"]
# set internal selected _is_elements to None. This way it is not stored (saves disk space)
# net._is_elements = None
# mode = net._options["mode"]
# if res:
# res_bus = net["res_bus_sc"] if mode == "sc" else \
# net["res_bus_3ph"] if mode == "pf_3ph" else \
# net["res_bus"]
# if len(net["trafo3w"]) > 0:
# buses_3w = net.trafo3w["ad_bus"].values
# net["bus"].drop(buses_3w, inplace=True)
# net["trafo3w"].drop(["ad_bus"], axis=1, inplace=True)
# if res:
# res_bus.drop(buses_3w, inplace=True)
#
# if len(net["xward"]) > 0:
# xward_buses = net["xward"]["ad_bus"].values
# net["bus"].drop(xward_buses, inplace=True)
# net["xward"].drop(["ad_bus"], axis=1, inplace=True)
# if res:
# res_bus.drop(xward_buses, inplace=True)
if len(net["dcline"]) > 0:
dc_gens = net.gen.index[(len(net.gen) - len(net.dcline) * 2):]
net.gen = net.gen.drop(dc_gens)
if res:
net.res_gen = net.res_gen.drop(dc_gens)
def _set_isolated_buses_out_of_service(net, ppc):
    """
    Marks ppc buses that are not connected to any in-service branch as out of
    service (bus type NONE) and stores their indices in net._isolated_buses.

    NOTE(review): ppc["bus"][:, 0] is read as the bus number and
    ppc["branch"][:, 10] as the branch in-service status (presumably
    BR_STATUS == 1 means in service -- confirm against the pypower index
    constants). The final assignment indexes ppc["bus"] rows directly with
    the bus numbers, which looks like it relies on consecutive internal
    numbering (row position == bus number) -- confirm against pd2ppc.
    """
    # set disconnected buses out of service
    # first check if buses are connected to branches
    # symmetric difference: bus numbers listed in the bus table but absent
    # from every in-service branch end-point end up in disco
    disco = np.setxor1d(ppc["bus"][:, 0].astype(np.int64),
                        ppc["branch"][ppc["branch"][:, 10] == 1, :2].real.astype(np.int64).flatten())
    # but also check if they may be the only connection to an ext_grid
    # keep slack (REF type) buses in service even without branch connections
    net._isolated_buses = np.setdiff1d(disco, ppc['bus'][ppc['bus'][:, 1] == REF,
                                                         :1].real.astype(np.int64))
    ppc["bus"][net._isolated_buses, 1] = NONE
def _write_lookup_to_net(net, element, element_lookup):
"""
Updates selected lookups in net
"""
net["_pd2ppc_lookups"][element] = element_lookup
def _check_if_numba_is_installed(level="warning"):
if not NUMBA_INSTALLED:
msg = (
'numba cannot be imported and numba functions are disabled.\n'
'Probably the execution is slow.\n'
'Please install numba to gain a massive speedup.\n'
'(or if you prefer slow execution, set the flag numba=False to avoid this warning!)')