diff --git a/.github/workflows/upload_release.yml b/.github/workflows/upload_release.yml index b0188225e..1620310e2 100644 --- a/.github/workflows/upload_release.yml +++ b/.github/workflows/upload_release.yml @@ -66,12 +66,14 @@ jobs: # Waste some time - name: Sleep for 150s to make release available + if: ${{ inputs.upload_server == 'pypi' }} uses: juliangruber/sleep-action@v1 with: time: 150s # Notify fraunhofer ci about the new version - uses: eic/trigger-gitlab-ci@v3 + if: ${{ inputs.upload_server == 'pypi' }} with: url: https://gitlab.cc-asp.fraunhofer.de project_id: 27329 @@ -81,6 +83,7 @@ jobs: # Run an installation for testing - name: Install pandapower from PyPI + if: ${{ inputs.upload_server == 'pypi' }} run: | - python3 -m pip install pandapower - python3 -c "import pandapower; print(pandapower.__version__)" \ No newline at end of file + python3 -m pip install --pre pandapower + python3 -c "import pandapower; print(pandapower.__version__)" diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d64019916..f6dc64e1e 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -3,6 +3,7 @@ Change Log [upcoming release] - 2024-..-.. ------------------------------- +- [ADDED] Static Var Compensator with Voltage Control - [ADDED] Implementation of Allocation Factor WLS (AF-WLS) for non observable distribution grids - [FIXED] Deletion of multiple measurements at the same bus or branch - [FIXED] Creation of zero injection measurements in WLS estimator @@ -19,6 +20,7 @@ Change Log - [ADDED] Add GeographicalRegion and SubGeographicalRegion names and ids to bus df in cim converter - [CHANGED] Capitalize first letter of columns busbar_id, busbar_name and substation_id in bus df for cim converter - [CHANGED] required standard type parameters are made available by function :code:`required_std_type_parameters()` +- [CHANGED] toolbox replace functions (e.g. 
gen replacement by sgens): improved result table implementation and added profiles consideration - [FIXED] Do not modify pandas options when importing pandapower - [FIXED] fixed copy-paste error in contingency results "max_limit_nminus1" and "min_limit_nminus1" - [ADDED] improved lightsim2grid documentation including compatibitliy issues @@ -92,6 +94,7 @@ Change Log - [ADDED] support for unequal leakage resistance and reactance for HV and LV sides of a 2W-transformer - [ADDED] Add VSC element, dc buses, dc lines, and hybrid AC/DC power flow calculation - [CHANGED] accelerate _integrate_power_elements_connected_with_switch_buses() in get_equivalent() +- [FIXED] index error during unbalanced powerflow if multiple external grids are present - [CHANGED] accelerate distributed slack power flow calculation by using sparse-aware operations in _subnetworks() - [CHANGED] Trafo Controllers can now be added to elements that are out of service, changed self.nothing_to_do() - [ADDED] Discrete shunt controller for local voltage regulation with shunt steps diff --git a/doc/converter/powerfactory.rst b/doc/converter/powerfactory.rst index b33d7614c..1ebfc5ff9 100644 --- a/doc/converter/powerfactory.rst +++ b/doc/converter/powerfactory.rst @@ -28,7 +28,7 @@ The documentation describes how to use the exporter as a function in "Engine mod - ElmZpu (pu Impedance) - ElmSind (Series Reactor) - Elmscap (Series Capacitor) - + - ElmSvs (Static Var Compensator with Voltage Control) Setup PowerFactory and Python ===================================== diff --git a/doc/elements/line_par.csv b/doc/elements/line_par.csv index 9054a0f5a..764b86873 100644 --- a/doc/elements/line_par.csv +++ b/doc/elements/line_par.csv @@ -5,18 +5,18 @@ from_bus*;integer;;Index of bus where the line starts to_bus*;integer;;Index of bus where the line ends length_km*;float;:math:`>` 0;length of the line [km] r_ohm_per_km*;float;:math:`\geq` 0 ;resistance of the line [Ohm per km] -x_ohm_per_km*;float;:math:`\geq` 0 ;inductance of the line [Ohm per km] +x_ohm_per_km*;float;:math:`\geq` 0 ;reactance of the line [Ohm per km] c_nf_per_km*;float;:math:`\geq` 0 ;capacitance of the line (line-to-earth) [nano Farad per km] r0_ohm_per_km****;float;:math:`\geq` 0 ;zero sequence resistance of the line [Ohm per km] -x0_ohm_per_km****;float;:math:`\geq` 0 ;zero sequence inductance of the line [Ohm per km] +x0_ohm_per_km****;float;:math:`\geq` 0 ;zero sequence reactance of the line [Ohm per km] c0_nf_per_km****;float;:math:`\geq` 0 ;zero sequence capacitance of the line [nano Farad per km] g_us_per_km*;float;:math:`\geq` 0 ;dielectric conductance of the line [micro Siemens per km] max_i_ka*;float;:math:`>` 0 ;maximal thermal current [kilo Ampere] parallel*;integer;:math:`\geq` 1;number of parallel line systems df*;float;0...1 ;derating factor (scaling) for max_i_ka -type;string;"| Naming conventions: - -| *""ol""* - overhead line +type;string;"| Naming conventions: + +| *""ol""* - overhead line | *""cs""* - underground cable system";type of line max_loading_percent**;float;:math:`>` 0 ;Maximum loading of the line endtemp_degree***;float;:math:`>` 0 ;Short-Circuit end temperature of the line diff --git a/pandapower/auxiliary.py b/pandapower/auxiliary.py index 507224db5..0f0098783 100644 --- a/pandapower/auxiliary.py +++ b/pandapower/auxiliary.py @@ -36,6 +36,8 @@ from geojson import loads, GeoJSON import numpy as np import pandas as pd +from pandas.api.types import is_numeric_dtype, is_string_dtype, is_object_dtype +# from pandas.api.types import 
is_integer_dtype, is_float_dtype import scipy as sp import numbers from packaging.version import Version @@ -451,12 +453,23 @@ def element_types_to_ets(element_types=None): ser2 = pd.Series(ser1.index, index=list(ser1)) if element_types is None: return ser2 - elif isinstance(ets, str): + elif isinstance(element_types, str): return ser2.at[element_types] else: return list(ser2.loc[element_types]) +def empty_defaults_per_dtype(dtype): + if is_numeric_dtype(dtype): + return np.nan + elif is_string_dtype(dtype): + return "" + elif is_object_dtype(dtype): + return None + else: + raise NotImplementedError(f"{dtype=} is not implemented in _empty_defaults()") + + def _preserve_dtypes(df, dtypes): for item, dtype in list(dtypes.items()): if df.dtypes.at[item] != dtype: diff --git a/pandapower/contingency/contingency.py b/pandapower/contingency/contingency.py index edcaf2398..6d8a892b9 100644 --- a/pandapower/contingency/contingency.py +++ b/pandapower/contingency/contingency.py @@ -136,15 +136,19 @@ def run_contingency(net, nminus1_cases, pf_options=None, pf_options_nminus1=None def run_contingency_ls2g(net, nminus1_cases, contingency_evaluation_function=pp.runpp, **kwargs): """ - Execute contingency analysis using the lightsim2grid library. This works much faster than using pandapower. - Limitation: the results for branch flows are valid only for the "from_bus" of lines and "hv_bus" of transformers. - This can lead to a small difference to the results using pandapower. - The results are written in pandapower results tables. - Make sure that the N-1 cases do not lead to isolated grid, otherwise results with pandapower and this function will - be different. Reason: pandapower selects a different gen as slack if the grid becomes isolated, but - lightsim2grid would simply return nan as results for such a contingency situation. - WARNING: continuous bus indices, 0-start, are required! - This function can be passed through to pandapower.timeseries.run_timeseries as the run_control_fct argument. + Execute contingency analysis using the lightsim2grid library. This works much faster than using + pandapower. + This function can be passed through to pandapower.timeseries.run_timeseries as the + run_control_fct argument. + + **Limitation:** the results for branch flows are valid only for the "from_bus" of lines and + "hv_bus" of transformers. This can lead to a small difference to the results using pandapower. + The results are written in pandapower results tables. Make sure that the N-1 cases do not lead + to isolated grid, otherwise results with pandapower and this function will + be different. Reason: pandapower selects a different gen as slack if the grid becomes isolated, + but lightsim2grid would simply return nan as results for such a contingency situation. + + **WARNING:** continuous bus indices, 0-start, are required! The results will written for the following additional variables: table res_bus with columns "max_vm_pu", "min_vm_pu", @@ -159,6 +163,7 @@ def run_contingency_ls2g(net, nminus1_cases, contingency_evaluation_function=pp. INPUT ---------- + **net** - pandapowerNet **nminus1_cases** - dict describes all N-1 cases, e.g. 
{"line": {"index": [1, 2, 3]}, "trafo": {"index": [0]}} diff --git a/pandapower/converter/powerfactory/pf_export_functions.py b/pandapower/converter/powerfactory/pf_export_functions.py index 5164706ba..3e170ac15 100644 --- a/pandapower/converter/powerfactory/pf_export_functions.py +++ b/pandapower/converter/powerfactory/pf_export_functions.py @@ -21,6 +21,7 @@ def create_network_dict(app, flag_graphics='GPS'): 'ElmAsm', 'ElmShnt', 'ElmVac', + 'ElmSvs', 'ElmVsc', 'ElmVscmono', @@ -56,6 +57,7 @@ def create_network_dict(app, flag_graphics='GPS'): 'ElmPvsys': ['W', 'var', 'VA'], 'ElmXnet': ['W', 'var', 'VA'], 'ElmSym': ['W', 'var', 'VA'], + 'ElmSvs': ['W', 'var', 'VA'], 'ElmAsm': ['W', 'var', 'VA'], 'ElmShnt': ['W', 'var', 'VA'], 'ElmZpu': ['W', 'var', 'VA'], diff --git a/pandapower/converter/powerfactory/pp_import_functions.py b/pandapower/converter/powerfactory/pp_import_functions.py index 8db54c564..ed750deb8 100644 --- a/pandapower/converter/powerfactory/pp_import_functions.py +++ b/pandapower/converter/powerfactory/pp_import_functions.py @@ -187,30 +187,43 @@ def from_pf( for n, fuse in enumerate(dict_net['RelFuse'], 1): create_coup(net=net, item=fuse, is_fuse=True) if n > 0: logger.info('imported %d fuses' % n) - + + logger.debug('creating shunts') # create shunts (ElmShnt): n = 0 for n, shunt in enumerate(dict_net['ElmShnt'], 1): create_shunt(net=net, item=shunt) if n > 0: logger.info('imported %d shunts' % n) - + + logger.debug('creating impedances') # create zpu (ElmZpu): n = 0 for n, zpu in enumerate(dict_net['ElmZpu'], 1): create_zpu(net=net, item=zpu) if n > 0: logger.info('imported %d impedances' % n) - + + logger.debug('creating series inductivity as impedance') # create series inductivity as impedance (ElmSind): n = 0 for n, sind in enumerate(dict_net['ElmSind'], 1): create_sind(net=net, item=sind) if n > 0: logger.info('imported %d SIND' % n) + + logger.debug('creating series capacity as impedance') # create series capacity as impedance (ElmScap): n = 0 for n, scap in enumerate(dict_net['ElmScap'], 1): create_scap(net=net, item=scap) if n > 0: logger.info('imported %d SCAP' % n) - + + logger.debug('creating static var compensator') + # create static var compensator (SVC) with control same as voltage controlled synchron machine (ElmSvs): + n = 0 + for n, svc in enumerate(dict_net['ElmSvs'], 1): + create_svc(net=net, item=svc, pv_as_slack=pv_as_slack, + pf_variable_p_gen=pf_variable_p_gen, dict_net=dict_net) + if n > 0: logger.info('imported %d SVC' % n) + # create vac (ElmVac): n = 0 for n, vac in enumerate(dict_net['ElmVac'], 1): @@ -625,6 +638,8 @@ def import_switch(item, idx_cubicle): def create_connection_switches(net, item, number_switches, et, buses, elements): # False if open, True if closed, None if no switch logger.debug('creating connection switches') + new_switch_idx = [] + new_switch_closed = [] for i in range(number_switches): switch_is_closed, switch_usage, switch_name = import_switch(item, i) logger.debug('switch closed: %s, switch_usage: %s' % (switch_is_closed, switch_usage)) @@ -632,6 +647,9 @@ def create_connection_switches(net, item, number_switches, et, buses, elements): cd = pp.create_switch(net, bus=buses[i], element=elements[i], et=et, closed=switch_is_closed, type=switch_usage, name=switch_name) net.res_switch.loc[cd, ['pf_closed', 'pf_in_service']] = switch_is_closed, True + new_switch_idx.append(cd) + new_switch_closed.append(switch_is_closed) + return new_switch_idx, new_switch_closed def get_coords_from_buses(net, from_bus, to_bus, **kwargs): @@ 
-754,8 +772,14 @@ def create_line(net, item, flag_graphics, create_sections, is_unbalanced): net[line_table].loc[sid_list, "equipment"] = item.for_name if ac: - create_connection_switches(net, item, 2, 'l', (params['bus1'], params['bus2']), - (sid_list[0], sid_list[-1])) + new_elements = (sid_list[0], sid_list[-1]) + new_switch_idx, new_switch_closed = create_connection_switches(net, item, 2, 'l', (params['bus1'], params['bus2']), + new_elements) + # correct in_service of lines if station switch is open + # update_in_service_depending_station_switch(net, element_type="line", + # new_elements=new_elements, + # new_switch_idx=new_switch_idx, + # new_switch_closed=new_switch_closed) logger.debug('line <%s> created' % params['name']) @@ -1723,7 +1747,6 @@ def create_load(net, item, pf_variable_p_loads, dict_net, is_unbalanced): params["const_i_percent"] = i params["const_z_percent"] = z - ### for now - don't import ElmLodlvp elif load_class == 'ElmLodlvp': parent = item.fold_id @@ -2423,8 +2446,15 @@ def create_trafo(net, item, export_controller=True, tap_opt="nntap", is_unbalanc get_pf_trafo_results(net, item, tid, is_unbalanced) # adding switches - # False if open, True if closed, None if no switch - create_connection_switches(net, item, 2, 't', (bus1, bus2), (tid, tid)) + # False if open, True if closed, None if no switch + new_elements = (tid, tid) + new_switch_idx, new_switch_closed = create_connection_switches(net, item, 2, 't', (bus1, bus2), + new_elements) + # correct in_service of trafo if station switch is open + # update_in_service_depending_station_switch(net, element_type="trafo", + # new_elements=new_elements, + # new_switch_idx=new_switch_idx, + # new_switch_closed=new_switch_closed) # adding tap changer if (export_controller and pf_type.itapch and item.HasAttribute('ntrcn') and @@ -2596,7 +2626,15 @@ def create_trafo3w(net, item, tap_opt='nntap'): # adding switches # False if open, True if closed, None if no switch - create_connection_switches(net, item, 3, 't3', (bus1, bus2, bus3), (tid, tid, tid)) + new_elements = (tid, tid, tid) + new_switch_idx, new_switch_closed = create_connection_switches(net, item, 3, 't3', + (bus1, bus2, bus3), new_elements) + + # correct in_service of trafo3w if station switch is open + # update_in_service_depending_station_switch(net, element_type="trafo3w", + # new_elements=new_elements, + # new_switch_idx=new_switch_idx, + # new_switch_closed=new_switch_closed) logger.debug('successfully created trafo3w from parameters: %d' % tid) # testen @@ -2811,8 +2849,8 @@ def create_zpu(net, item): # net, from_bus, to_bus, r_pu, x_pu, sn_Mva, name=None, in_service=True, index=None params = { 'name': item.loc_name, - 'from_bus': bus1, - 'to_bus': bus2, + # 'from_bus': bus1, + # 'to_bus': bus2, 'rft_pu': item.r_pu, 'xft_pu': item.x_pu, 'rtf_pu': item.r_pu_ji, @@ -2834,9 +2872,49 @@ def create_zpu(net, item): } logger.debug('params = %s' % params) + + # create auxilary buses + aux_bus1 = pp.create_bus(net, vn_kv=net.bus.vn_kv.at[bus1], name=net.bus.name.at[bus1]+'_aux', + geodata=net.bus.geo.at[bus1], type="b", zone=net.bus.zone.at[bus1], + in_service=True) + params['from_bus'] = aux_bus1 + aux_bus2 = pp.create_bus(net, vn_kv=net.bus.vn_kv.at[bus2], name=net.bus.name.at[bus2]+'_aux', + geodata=net.bus.geo.at[bus2], type="b", zone=net.bus.zone.at[bus2], + in_service=True) + params['to_bus'] = aux_bus2 + xid = pp.create_impedance(net, **params) add_additional_attributes(item, net, element='impedance', element_id=xid, attr_list=["cpSite.loc_name"], 
attr_dict={"cimRdfId": "origin_id"}) + + # consider and create station switches + new_elements = (aux_bus1, aux_bus2) + new_switch_idx, new_switch_closed = create_connection_switches(net, item, 2, 'b', (bus1, bus2), + new_elements) + + if len(new_switch_idx)==0: + net.impedance.loc[xid, 'from_bus'] = bus1 + net.impedance.loc[xid, 'to_bus'] = bus2 + # drop auxilary buses, not needed + pp.drop_buses(net, buses=[aux_bus1, aux_bus2]) + elif len(new_switch_idx)==1: + sw_bus = net.switch.loc[new_switch_idx[0], 'bus'] + if sw_bus==bus1: + net.impedance.loc[xid, 'to_bus'] = bus2 + # drop one auxilary bus, where no switch exists, not needed + pp.drop_buses(net, buses=[aux_bus2]) + elif sw_bus==bus2: + net.impedance.loc[xid, 'from_bus'] = bus1 + # drop one auxilary bus, where no switch exists, not needed + pp.drop_buses(net, buses=[aux_bus1]) + + # correct in_service of series reactor if station switch is open + # update_in_service_depending_station_switch(net, element_type="impedance", + # new_elements=new_elements, + # new_switch_idx=new_switch_idx, + # new_switch_closed=new_switch_closed) + + logger.debug('created ZPU %s as impedance at index %d' % (net.impedance.at[xid, 'name'], xid)) def create_vac(net, item): @@ -2849,7 +2927,8 @@ def create_vac(net, item): except IndexError: logger.error("Cannot add VAC '%s': not connected" % item.loc_name) return - + + in_service = monopolar_in_service(item) params = { 'name': item.loc_name, 'bus': bus, @@ -2857,7 +2936,7 @@ def create_vac(net, item): 'qs_mvar': item.Qload - item.Qgen, 'pz_mw': item.Pzload, 'qz_mvar': item.Qzload, - 'in_service': not bool(item.outserv) + 'in_service': in_service } if item.itype == 3: @@ -2907,6 +2986,20 @@ def create_vac(net, item): logger.debug('added pf_p and pf_q to {} {}: {}'.format(elm, xid, net['res_' + elm].loc[ xid, ["pf_p", 'pf_q']].values)) +def update_in_service_depending_station_switch(net, element_type, new_elements, new_switch_idx, new_switch_closed): + ### fcn is not used! 
+ if len(new_switch_idx)!= 0: + for i in range(len(new_switch_idx)): + if new_switch_closed[i] == 0: + if net[element_type].loc[new_elements[i], 'in_service']==False: + continue + else: + net[element_type].loc[new_elements[i], 'in_service'] = False + logger.debug('element of element_type %s with index %d is set\ + out of service because station switch is open ' % + (net[element_type].at[new_elements[i], 'name'], new_elements[i])) + else: + pass def create_sind(net, item): # series reactor is modelled as per-unit impedance, values in Ohm are calculated into values in @@ -2916,12 +3009,47 @@ def create_sind(net, item): except IndexError: logger.error("Cannot add Sind '%s': not connected" % item.loc_name) return - - sind = pp.create_series_reactor_as_impedance(net, from_bus=bus1, to_bus=bus2, r_ohm=item.rrea, - x_ohm=item.xrea, sn_mva=item.Sn, + + # create auxilary buses + aux_bus1 = pp.create_bus(net, vn_kv=net.bus.vn_kv.at[bus1], name=net.bus.name.at[bus1]+'_aux', + geodata=net.bus.geo.at[bus1], type="b", zone=net.bus.zone.at[bus1], + in_service=True) + aux_bus2 = pp.create_bus(net, vn_kv=net.bus.vn_kv.at[bus2], name=net.bus.name.at[bus2]+'_aux', + geodata=net.bus.geo.at[bus2], type="b", zone=net.bus.zone.at[bus2], + in_service=True) + + sind = pp.create_series_reactor_as_impedance(net, from_bus=aux_bus1, to_bus=aux_bus2, + r_ohm=item.rrea, x_ohm=item.xrea, sn_mva=item.Sn, name=item.loc_name, in_service=not bool(item.outserv)) - + + # consider and create station switches + new_elements = (aux_bus1, aux_bus2) + new_switch_idx, new_switch_closed = create_connection_switches(net, item, 2, 'b', (bus1, bus2), + new_elements) + + if len(new_switch_idx)==0: + net.impedance.loc[sind, 'from_bus'] = bus1 + net.impedance.loc[sind, 'to_bus'] = bus2 + # drop auxilary buses, not needed + pp.drop_buses(net, buses=[aux_bus1, aux_bus2]) + elif len(new_switch_idx)==1: + sw_bus = net.switch.loc[new_switch_idx[0], 'bus'] + if sw_bus==bus1: + net.impedance.loc[sind, 'to_bus'] = bus2 + # drop one auxilary bus, where no switch exists, not needed + pp.drop_buses(net, buses=[aux_bus2]) + elif sw_bus==bus2: + net.impedance.loc[sind, 'from_bus'] = bus1 + # drop one auxilary bus, where no switch exists, not needed + pp.drop_buses(net, buses=[aux_bus1]) + + # correct in_service of series reactor if station switch is open + # update_in_service_depending_station_switch(net, element_type="impedance", + # new_elements=new_elements, + # new_switch_idx=new_switch_idx, + # new_switch_closed=new_switch_closed) + logger.debug('created series reactor %s as per unit impedance at index %d' % (net.impedance.at[sind, 'name'], sind)) @@ -2930,23 +3058,101 @@ def create_scap(net, item): # series capacitor is modelled as per-unit impedance, values in Ohm are calculated into values in # per unit at creation try: - (bus1, bus2) = get_connection_nodes(net, item, 2) + (bus1, bus2), _ = get_connection_nodes(net, item, 2) except IndexError: logger.error("Cannot add Scap '%s': not connected" % item.loc_name) return - if (item.gcap == 0) or (item.bcap == 0): + if (item.gcap==0) and (item.bcap==0): logger.info('not creating series capacitor for %s' % item.loc_name) else: - r_ohm = item.gcap / (item.gcap ** 2 + item.bcap ** 2) - x_ohm = -item.bcap / (item.gcap ** 2 + item.bcap ** 2) - scap = pp.create_series_reactor_as_impedance(net, from_bus=bus1, to_bus=bus2, r_ohm=r_ohm, + r_ohm = item.gcap/(item.gcap**2 + item.bcap**2) + x_ohm = -item.bcap/(item.gcap**2 + item.bcap**2) + + # create auxilary buses + aux_bus1 = pp.create_bus(net, 
vn_kv=net.bus.vn_kv.at[bus1], name=net.bus.name.at[bus1]+'_aux', + geodata=net.bus.geo.at[bus1], type="b", zone=net.bus.zone.at[bus1], + in_service=True) + aux_bus2 = pp.create_bus(net, vn_kv=net.bus.vn_kv.at[bus2], name=net.bus.name.at[bus2]+'_aux', + geodata=net.bus.geo.at[bus2], type="b", zone=net.bus.zone.at[bus2], + in_service=True) + + scap = pp.create_series_reactor_as_impedance(net, from_bus=aux_bus1, to_bus=aux_bus2, r_ohm=r_ohm, x_ohm=x_ohm, sn_mva=item.Sn, name=item.loc_name, in_service=not bool(item.outserv)) + + # consider and create station switches + new_elements = (aux_bus1, aux_bus2) + new_switch_idx, new_switch_closed = create_connection_switches(net, item, 2, 'b', (bus1, bus2), + new_elements) + + if len(new_switch_idx)==0: + net.impedance.loc[scap, 'from_bus'] = bus1 + net.impedance.loc[scap, 'to_bus'] = bus2 + # drop auxilary buses, not needed + pp.drop_buses(net, buses=[aux_bus1, aux_bus2]) + elif len(new_switch_idx)==1: + sw_bus = net.switch.loc[new_switch_idx[0], 'bus'] + if sw_bus==bus1: + net.impedance.loc[scap, 'to_bus'] = bus2 + # drop one auxilary bus, where no switch exists, not needed + pp.drop_buses(net, buses=[aux_bus2]) + elif sw_bus==bus2: + net.impedance.loc[scap, 'from_bus'] = bus1 + # drop one auxilary bus, where no switch exists, not needed + pp.drop_buses(net, buses=[aux_bus1]) + + # correct in_service of series capacitor if station switch is open + # update_in_service_depending_station_switch(net, element_type="impedance", + # new_elements=new_elements, + # new_switch_idx=new_switch_idx, + # new_switch_closed=new_switch_closed) logger.debug('created series capacitor %s as per unit impedance at index %d' % (net.impedance.at[scap, 'name'], scap)) + +def create_svc(net, item, pv_as_slack, pf_variable_p_gen, dict_net): + # SVC is voltage controlled and therefore modelled the same way as a voltage controlled synchron machine (gen) + # TODO: at least implement a uncontrolled svc as synchron machine with const. Q + # TODO: transfer item entries for usage of pp.create_svc, x_l_ohm, x_cvar_ohm, + # thyristor_firing_angle must be computed + name = item.loc_name + sid = None + element = None + logger.debug('>> creating synchronous machine <%s>' % name) + + try: + bus1 = get_connection_nodes(net, item, 1) + except IndexError: + logger.error("Cannot add SVC '%s': not connected" % name) + return + + if item.i_ctrl==1: # 0: no control, 1: voltage control, 2: reactive power control + logger.debug('creating SVC %s as gen' % name) + vm_pu = item.usetp + in_service = monopolar_in_service(item) + svc = pp.create_gen(net, bus=bus1, p_mw=0, vm_pu=vm_pu, + name=name, type="SVC", in_service=in_service) + element = 'gen' + + if svc is None or element is None: + logger.error('Error! SVC not created') + logger.debug('created svc at index <%s>' % svc) + + net[element].loc[svc, 'description'] = ' \n '.join(item.desc) if len(item.desc) > 0 else '' + add_additional_attributes(item, net, element, svc, attr_dict={"for_name": "equipment"}, + attr_list=["sernum", "chr_name", "cpSite.loc_name"]) + + if item.HasResults(0): # 'm' results... 
+ logger.debug('<%s> has results' % name) + net['res_' + element].at[svc, "pf_p"] = ga(item, 'm:P:bus1') #* multiplier + net['res_' + element].at[svc, "pf_q"] = ga(item, 'm:Q:bus1') #* multiplier + else: + net['res_' + element].at[svc, "pf_p"] = np.nan + net['res_' + element].at[svc, "pf_q"] = np.nan + else: + logger.info('not creating SVC for %s' % item.loc_name) def _get_vsc_control_modes(item, mono=True): diff --git a/pandapower/create.py b/pandapower/create.py index f22e7a89d..83a54918f 100644 --- a/pandapower/create.py +++ b/pandapower/create.py @@ -14,7 +14,8 @@ from pandas.api.types import is_object_dtype from pandapower._version import __version__, __format_version__ -from pandapower.auxiliary import pandapowerNet, get_free_id, _preserve_dtypes, ensure_iterability +from pandapower.auxiliary import pandapowerNet, get_free_id, _preserve_dtypes, ensure_iterability, \ + empty_defaults_per_dtype from pandapower.results import reset_results from pandapower.std_types import add_basic_std_types, load_std_type import numpy as np @@ -698,7 +699,7 @@ def create_bus(net, vn_kv, name=None, index=None, geodata=None, type="b", zone=N **index** (int) - The unique ID of the created element EXAMPLE: - create_bus(net, name = "bus1") + create_bus(net, 20., name = "bus1") """ index = _get_index_with_check(net, "bus", index) @@ -765,7 +766,7 @@ def create_bus_dc(net, vn_kv, name=None, index=None, geodata=None, type="b", zon **index** (int) - The unique ID of the created element EXAMPLE: - create_bus_dc(net, name = "bus1") + create_bus_dc(net, 20., name = "bus1") """ index = _get_index_with_check(net, "bus_dc", index) @@ -871,6 +872,8 @@ def _geodata_to_geo_series(data: Union[Iterable[Tuple[float, float]], Tuple[int, _add_to_entries_if_not_nan(net, "bus", entries, index, "min_vm_pu", min_vm_pu) _add_to_entries_if_not_nan(net, "bus", entries, index, "max_vm_pu", max_vm_pu) _set_multiple_entries(net, "bus", index, **entries, **kwargs) + net.bus.loc[net.bus.geo == "", "geo"] = None # overwrite + # empty_defaults_per_dtype() applied in _set_multiple_entries() return index @@ -887,20 +890,20 @@ def create_buses_dc(net, nr_buses_dc, vn_kv, index=None, name=None, type="b", ge **nr_buses_dc** (int) - The number of dc buses that is created - OPTIONAL: - **name** (list of string, default None) - the name for this dc bus + **vn_kv** (float) - The grid voltage level. + OPTIONAL: **index** (list of int, default None) - Force specified IDs if available. If None, the indices \ higher than the highest already existing index are selected. + + **name** (list of string, default None) - the name for this dc bus - **vn_kv** (float) - The grid voltage level. + **type** (string, default "b") - Type of the dc bus. "n" - auxilary node, + "b" - busbar, "m" - muff **geodata** ((x,y)-tuple or list of tuples with length == nr_buses_dc, default None) - coordinates used for plotting - **type** (string, default "b") - Type of the dc bus. 
"n" - auxilary node, - "b" - busbar, "m" - muff - **zone** (string, None) - grid region **in_service** (list of boolean) - True for in_service or False for out of service @@ -919,7 +922,7 @@ def create_buses_dc(net, nr_buses_dc, vn_kv, index=None, name=None, type="b", ge **index** (int) - The unique indices ID of the created elements EXAMPLE: - create_buses_dc(net, name = ["bus1","bus2"]) + create_buses_dc(net, 2, [20., 20.], name = ["bus1","bus2"]) """ index = _get_multiple_index_with_check(net, "bus_dc", index, nr_buses_dc) @@ -958,12 +961,12 @@ def create_load(net, bus, p_mw, q_mvar=0, const_z_percent=0, const_i_percent=0, **bus** (int) - The bus id to which the load is connected - OPTIONAL: - **p_mw** (float, default 0) - The active power of the load + **p_mw** (float) - The active power of the load - positive value -> load - negative value -> generation + OPTIONAL: **q_mvar** (float, default 0) - The reactive power of the load **const_z_percent** (float, default 0) - percentage of p_mw and q_mvar that will be \ @@ -1044,12 +1047,12 @@ def create_loads(net, buses, p_mw, q_mvar=0, const_z_percent=0, const_i_percent= **buses** (list of int) - A list of bus ids to which the loads are connected - OPTIONAL: **p_mw** (list of floats) - The active power of the loads - postive value -> load - negative value -> generation + OPTIONAL: **q_mvar** (list of floats, default 0) - The reactive power of the loads **const_z_percent** (list of floats, default 0) - percentage of p_mw and q_mvar that will \ @@ -1355,7 +1358,7 @@ def create_sgen(net, bus, p_mw, q_mvar=0, sn_mva=nan, name=None, index=None, **index** (int) - The unique ID of the created sgen EXAMPLE: - create_sgen(net, 1, p_mw = -120) + create_sgen(net, 1, p_mw = 120) """ _check_node_element(net, bus) @@ -2016,7 +2019,7 @@ def create_gens(net, buses, p_mw, vm_pu=1., sn_mva=nan, name=None, index=None, m **index** (int) - The unique ID of the created generator EXAMPLE: - create_gen(net, 1, p_mw = 120, vm_pu = 1.02) + create_gens(net, [1, 2], p_mw = [120, 100], vm_pu = [1.02, 0.99]) """ _check_multiple_node_elements(net, buses) @@ -2294,7 +2297,7 @@ def create_line(net, from_bus, to_bus, length_km, std_type, name=None, index=Non **index** (int) - The unique ID of the created line EXAMPLE: - create_line(net, "line1", from_bus = 0, to_bus = 1, length_km=0.1, std_type="NAYY 4x50 SE") + create_line(net, from_bus = 0, to_bus = 1, length_km=0.1, std_type="NAYY 4x50 SE", name = "line1") """ @@ -2430,7 +2433,7 @@ def create_line_dc(net, from_bus_dc, to_bus_dc, length_km, std_type, name=None, **index** (int) - The unique ID of the created dc line EXAMPLE: - create_line_dc(net, "line_dc1", from_bus_dc = 0, to_bus_dc = 1, length_km=0.1, std_type="Not defined yet") + create_line_dc(net, from_bus_dc = 0, to_bus_dc = 1, length_km=0.1, std_type="NAYY 4x50 SE", name = "line_dc1") """ @@ -2560,7 +2563,7 @@ def create_lines(net, from_buses, to_buses, length_km, std_type, name=None, inde **index** (list of int) - The unique ID of the created lines EXAMPLE: - create_lines(net, ["line1", "line2"], from_buses=[0,1], to_buses=[2,3], length_km=0.1, std_type="NAYY 4x50 SE") + create_lines(net, from_buses=[0,1], to_buses=[2,3], length_km=0.1, std_type="NAYY 4x50 SE", name = ["line1", "line2"]) """ _check_multiple_branch_elements(net, from_buses, to_buses, "Lines") @@ -2606,6 +2609,8 @@ def create_lines(net, from_buses, to_buses, length_km, std_type, name=None, inde _add_to_entries_if_not_nan(net, "line", entries, index, column, value, float64) _set_multiple_entries(net, 
"line", index, **entries, **kwargs) + net.line.loc[net.line.geo == "", "geo"] = None # overwrite + # empty_defaults_per_dtype() applied in _set_multiple_entries() if geodata: _add_multiple_branch_geodata(net, geodata, index) @@ -2690,8 +2695,8 @@ def create_lines_dc(net, from_buses_dc, to_buses_dc, length_km, std_type, name=N **index** (list of int) - The unique ID of the created dc lines EXAMPLE: - create_lines_dc(net, ["line_dc1","line_dc2"], from_buses_dc=[0,1], to_buses_dc=[2,3], length_km=0.1, - std_type="Not specified yet") + create_lines_dc(net, from_buses_dc=[0,1], to_buses_dc=[2,3], length_km=0.1, + std_type="Not specified yet", name = ["line_dc1","line_dc2"]) """ _check_multiple_branch_elements(net, from_buses_dc, to_buses_dc, "Lines_dc", node_name='bus_dc', plural='(all dc buses)') @@ -2831,9 +2836,9 @@ def create_line_from_parameters(net, from_bus, to_bus, length_km, r_ohm_per_km, **index** (int) - The unique ID of the created line EXAMPLE: - create_line_from_parameters(net, "line1", from_bus = 0, to_bus = 1, lenght_km=0.1, + create_line_from_parameters(net, from_bus = 0, to_bus = 1, lenght_km=0.1, r_ohm_per_km = .01, x_ohm_per_km = 0.05, c_nf_per_km = 10, - max_i_ka = 0.4) + max_i_ka = 0.4, name = "line1") """ @@ -2967,8 +2972,8 @@ def create_line_dc_from_parameters(net, from_bus_dc, to_bus_dc, length_km, r_ohm **index** (int) - The unique ID of the created line EXAMPLE: - create_line_dc_from_parameters(net, "line_dc1", from_bus_dc = 0, to_bus_dc = 1, lenght_km=0.1, - r_ohm_per_km = .01, max_i_ka = 0.4) + create_line_dc_from_parameters(net, from_bus_dc = 0, to_bus_dc = 1, lenght_km=0.1, + r_ohm_per_km = .01, max_i_ka = 0.4, name = "line_dc1") """ @@ -3106,8 +3111,8 @@ def create_lines_from_parameters(net, from_buses, to_buses, length_km, r_ohm_per **index** (list of int) - The unique ID of the created lines EXAMPLE: - create_lines_from_parameters(net, ["line1","line2"], from_buses = [0,1], to_buses = [2,3], length_km= 0.1, - r_ohm_per_km = .01, x_ohm_per_km = 0.05, c_nf_per_km = 10, max_i_ka = 0.4) + create_lines_from_parameters(net, from_buses = [0,1], to_buses = [2,3], length_km= 0.1, + r_ohm_per_km = .01, x_ohm_per_km = 0.05, c_nf_per_km = 10, max_i_ka = 0.4, name = ["line1","line2"]) """ _check_multiple_branch_elements(net, from_buses, to_buses, "Lines") @@ -3229,11 +3234,11 @@ def create_lines_dc_from_parameters(net, from_buses_dc, to_buses_dc, length_km, tdpf_delay_s parameter) OUTPUT: - **index** (list of int) - The unique ID of the created dc lines + **index** (list of int) - The list of IDs of the created dc lines EXAMPLE: - create_lines_dc_from_parameters(net, name= ["line_dc1","line_dc2"], from_buses_dc = [0,1], to_buses_dc = [2,3], lenght_km=0.1, - r_ohm_per_km = .01, max_i_ka = 0.4) + create_lines_dc_from_parameters(net, from_buses_dc = [0,1], to_buses_dc = [2,3], lenght_km=0.1, + r_ohm_per_km = .01, max_i_ka = 0.4, name= ["line_dc1","line_dc2"]) """ _check_multiple_branch_elements(net, from_buses_dc, to_buses_dc, "Lines_dc",node_name='bus_dc', plural= '(all dc buses)') @@ -3345,8 +3350,8 @@ def create_transformer(net, hv_bus, lv_bus, std_type, name=None, tap_pos=nan, in **index** (int) - The unique ID of the created transformer EXAMPLE: - create_transformer(net, hv_bus = 0, lv_bus = 1, name = "trafo1", std_type = \ - "0.4 MVA 10/0.4 kV") + create_transformer(net, hv_bus = 0, lv_bus = 1, std_type = "0.4 MVA 10/0.4 kV",\ + name = "trafo1") """ # Check if bus exist to attach the trafo to @@ -3769,10 +3774,10 @@ def create_transformers_from_parameters(net, hv_buses, 
lv_buses, sn_mva, vn_hv_k ** only considered in loadflow if calculate_voltage_angles = True OUTPUT: - **index** (int) - The unique ID of the created transformer + **index** (int) - The list of IDs of the created transformers EXAMPLE: - create_transformer_from_parameters(net, hv_bus=0, lv_bus=1, name="trafo1", sn_mva=40, \ + create_transformers_from_parameters(net, hv_bus=[0, 1], lv_bus=[2, 3], name="trafo1", sn_mva=40, \ vn_hv_kv=110, vn_lv_kv=10, vk_percent=10, vkr_percent=0.3, pfe_kw=30, \ i0_percent=0.1, shift_degree=30) """ @@ -4293,7 +4298,7 @@ def create_transformers3w_from_parameters( **trafo_id** - List of trafo_ids of the created 3W transformers Example: - create_transformer3w_from_parameters(net, hv_bus=0, mv_bus=1, lv_bus=2, name="trafo1", + create_transformers3w_from_parameters(net, hv_bus=[0, 3], mv_bus=[1, 4], lv_bus=[2, 5], name="trafo1", sn_hv_mva=40, sn_mv_mva=20, sn_lv_mva=20, vn_hv_kv=110, vn_mv_kv=20, vn_lv_kv=10, vk_hv_percent=10,vk_mv_percent=11, vk_lv_percent=12, vkr_hv_percent=0.3, vkr_mv_percent=0.31, vkr_lv_percent=0.32, pfe_kw=30, i0_percent=0.1, shift_mv_degree=30, @@ -4401,9 +4406,9 @@ def create_switch(net, bus, element, et, closed=True, type=None, name=None, inde **sid** - The unique switch_id of the created switch EXAMPLE: - create_switch(net, bus = 0, element = 1, et = 'b', type ="LS", z_ohm = 0.1) + create_switch(net, bus=0, element=1, et='b', type="LS", z_ohm=0.1) - create_switch(net, bus = 0, element = 1, et = 'l') + create_switch(net, bus=0, element=1, et='l') """ _check_node_element(net, bus) @@ -4485,12 +4490,12 @@ def create_switches(net, buses, elements, et, closed=True, type=None, name=None, normal operating conditions without tripping OUTPUT: - **sid** - The unique switch_id of the created switch + **sid** - List of switch_id of the created switches EXAMPLE: - create_switch(net, bus = 0, element = 1, et = 'b', type ="LS", z_ohm = 0.1) + create_switches(net, buses=[0, 1], element=1, et='b', type="LS", z_ohm=0.1) - create_switch(net, bus = 0, element = 1, et = 'l') + create_switches(net, buses=[0, 1], element=1, et='l') """ index = _get_multiple_index_with_check(net, "switch", index, len(buses), name="Switches") @@ -4611,10 +4616,10 @@ def create_shunts(net, buses, q_mvar, p_mw=0., vn_kv=None, step=1, max_step=1, n index one higher than the highest already existing index is selected. OUTPUT: - **index** (int) - The unique ID of the created shunt + **index** (int) - The list of IDs of the created shunts EXAMPLE: - create_shunt(net, 0, 20) + create_shunts(net, [0, 2], [20, 30]) """ _check_multiple_node_elements(net, buses) @@ -4929,8 +4934,8 @@ def create_impedance(net, from_bus, to_bus, rft_pu, xft_pu, sn_mva, rtf_pu=None, UserWarning If required impedance parameters are missing. 
""" - - + + index = _get_index_with_check(net, "impedance", index) _check_branch_element(net, "Impedance", index, from_bus, to_bus) @@ -5111,7 +5116,7 @@ def create_impedances(net, from_buses, to_buses, rft_pu, xft_pu, sn_mva, rtf_pu= entries = dict(zip(columns, values)) _set_multiple_entries(net, "impedance", index, **entries, **kwargs) - + if rft0_pu is not None: _set_value_if_not_nan(net, index, rft0_pu, "rft0_pu", "impedance") _set_value_if_not_nan(net, index, xft0_pu, "xft0_pu", "impedance") @@ -5597,7 +5602,8 @@ def create_pwl_costs(net, elements, et, points, power_type="p", index=None, chec To create a gen with costs of 1€/MW between 0 and 20 MW and 2€/MW between 20 and 30: - create_pwl_cost(net, 0, "gen", [[0, 20, 1], [20, 30, 2]]) + create_pwl_costs(net, [0, 1], ["gen", "sgen"], [[[0, 20, 1], [20, 30, 2]], \ + [[0, 20, 1], [20, 30, 2]]]) """ if not hasattr(elements, "__iter__") and not isinstance(elements, str): raise ValueError(f"An iterable is expected for elements, not {elements}.") @@ -5820,7 +5826,9 @@ def create_group(net, element_types, element_indices, name="", reference_columns entries = dict(zip(["name", "element_type", "element_index", "reference_column"], [name, element_types, element_indices, reference_columns])) - _set_multiple_entries(net, "group", index, **entries, **kwargs) + _set_multiple_entries(net, "group", index, **entries) + net.group.loc[net.group.reference_column == "", "reference_column"] = None # overwrite + # empty_defaults_per_dtype() applied in _set_multiple_entries() return index[0] @@ -6074,7 +6082,14 @@ def check_entry(val): net[table][col] = val # extend the table by the frame we just created - net[table] = pd.concat([net[table], dd[dd.columns[~dd.isnull().all()]]], sort=False) + if len(net[table]): + net[table] = pd.concat([net[table], dd[dd.columns[~dd.isnull().all()]]], sort=False) + else: + dd_columns = dd.columns[~dd.isnull().all()] + complete_columns = list(net[table].columns)+list(dd_columns.difference(net[table].columns)) + empty_dict = {key: empty_defaults_per_dtype(dtype) for key, dtype in net[table][net[ + table].columns.difference(dd_columns)].dtypes.to_dict().items()} + net[table] = dd[dd_columns].assign(**empty_dict)[complete_columns] # and preserve dtypes if preserve_dtypes: diff --git a/pandapower/results_gen.py b/pandapower/results_gen.py index 865efefcf..9851eeead 100644 --- a/pandapower/results_gen.py +++ b/pandapower/results_gen.py @@ -170,7 +170,7 @@ def _get_ext_grid_results_3ph(net, ppc0, ppc1, ppc2): eg_bus_idx_ppc = np.real(ppc1["gen"][eg_idx_ppc, GEN_BUS]).astype(np.int64) # read results from ppc for these buses V012 = np.array(np.zeros((3, n_res_eg)),dtype = np.complex128) - V012[:, eg_is_idx] = np.array([ppc["bus"][eg_bus_idx_ppc, VM] * ppc["bus"][eg_bus_idx_ppc, BASE_KV] + V012[:, eg_idx_ppc] = np.array([ppc["bus"][eg_bus_idx_ppc, VM] * ppc["bus"][eg_bus_idx_ppc, BASE_KV] * np.exp(1j * np.deg2rad(ppc["bus"][eg_bus_idx_ppc, VA])) for ppc in [ppc0, ppc1, ppc2]]) diff --git a/pandapower/sql_io.py b/pandapower/sql_io.py index 1175809f7..198ca8a5d 100644 --- a/pandapower/sql_io.py +++ b/pandapower/sql_io.py @@ -87,7 +87,8 @@ def download_sql_table(cursor, table_name, **id_columns): colnames = [desc[0] for desc in cursor.description] table = cursor.fetchall() df = pd.DataFrame(table, columns=colnames) - df = df.fillna(np.nan) + with pd.option_context('future.no_silent_downcasting', True): + df = df.fillna(np.nan).infer_objects() index_name = f"{table_name.split('.')[-1]}_id" if index_name in df.columns: df = 
df.set_index(index_name) diff --git a/pandapower/test/api/test_create.py b/pandapower/test/api/test_create.py index 1e1ed8dd7..955a1044a 100644 --- a/pandapower/test/api/test_create.py +++ b/pandapower/test/api/test_create.py @@ -1487,6 +1487,8 @@ def test_create_storages(): net.storage.test_kwargs.values == ["dummy_string_1", "dummy_string_2", "dummy_string_3"] ) + for col in ["name", "type"]: + net.storage.loc[net.storage[col].isnull(), col] = "" assert pp.nets_equal(net, net_bulk) diff --git a/pandapower/test/api/test_group.py b/pandapower/test/api/test_group.py index ab9761a2c..4834d3324 100644 --- a/pandapower/test/api/test_group.py +++ b/pandapower/test/api/test_group.py @@ -331,7 +331,7 @@ def test_detach_and_compare(): pp.detach_from_group(net, 3, "trafo", 1) assert pp.group_element_lists(net, 3)[0] == ["trafo"] assert pp.group_element_lists(net, 3)[1] == [typed_list([0, 2], type_)] - assert pp.group_element_lists(net, 3)[2] == [np.nan if type_ is int else "name"] + assert pp.group_element_lists(net, 3)[2] == [None if type_ is int else "name"] def test_res_power(): diff --git a/pandapower/test/loadflow/test_runpp.py b/pandapower/test/loadflow/test_runpp.py index e6c7e764b..dfc117377 100644 --- a/pandapower/test/loadflow/test_runpp.py +++ b/pandapower/test/loadflow/test_runpp.py @@ -1472,5 +1472,18 @@ def test_lightsim2grid_option(): assert net._options["lightsim2grid"] +def test_at_isolated_bus(): + net = pp.create_empty_network() + pp.create_buses(net, 4, 110) + pp.create_ext_grid(net, 0) + + pp.create_line_from_parameters(net, 0, 1, 30, 0.0487, 0.13823, 160, 0.664) + + pp.create_gen(net, 3, 0, vm_pu=0, in_service=False) + + pp.runpp(net) + assert net._options["init_vm_pu"] == 1. + + if __name__ == "__main__": pytest.main([__file__, "-xs"]) \ No newline at end of file diff --git a/pandapower/toolbox/comparison.py b/pandapower/toolbox/comparison.py index 2f5a0b97d..0d805d38d 100644 --- a/pandapower/toolbox/comparison.py +++ b/pandapower/toolbox/comparison.py @@ -50,7 +50,9 @@ def dataframes_equal(df1, df2, ignore_index_order=True, **kwargs): if "geo" in df1.columns and "geo" in df2.columns: not_eq_warn = "DataFrames do not match in column 'geo'." 
notnull1 = df1.geo.index[~df1.geo.isnull()] + notnull1 = df1.geo.loc[notnull1].index[df1.geo.loc[notnull1].apply(len).astype(bool)] notnull2 = df2.geo.index[~df2.geo.isnull()] + notnull2 = df2.geo.loc[notnull2].index[df2.geo.loc[notnull2].apply(len).astype(bool)] if len(notnull1) + len(notnull2) == 0: return True diff --git a/pandapower/toolbox/grid_modification.py b/pandapower/toolbox/grid_modification.py index 9652b2671..512a4ff0b 100644 --- a/pandapower/toolbox/grid_modification.py +++ b/pandapower/toolbox/grid_modification.py @@ -1099,26 +1099,14 @@ def replace_impedance_by_line(net, index=None, only_valid_replace=True, max_i_ka _replace_group_member_element_type(net, index, "impedance", new_index, "line", detach_from_gr=False) drop_elements_simple(net, "impedance", index) - return new_index + # --- result data + _adapt_result_tables_in_replace_functions(net, "impedance", index, "line", new_index) -def _replace_group_member_element_type( - net, old_elements, old_element_type, new_elements, new_element_type, detach_from_gr=True): - assert not isinstance(old_element_type, set) - assert not isinstance(new_element_type, set) - old_elements = pd.Series(old_elements) - new_elements = pd.Series(new_elements) + # --- adapt profiles + _adapt_profiles_in_replace_functions(net, "impedance", index, "line", new_index) - check_unique_group_rows(net) - gr_et = net.group.loc[net.group.element_type == old_element_type] - for gr_index in gr_et.index: - isin = old_elements.isin(gr_et.at[gr_index, "element_index"]) - if any(isin): - attach_to_group(net, gr_index, new_element_type, [new_elements.loc[isin].tolist()], - reference_columns=gr_et.at[gr_index, "reference_column"]) - if detach_from_gr: - detach_from_groups(net, old_element_type, old_elements) # sometimes done afterwarts when - # dropping the old elements + return new_index def replace_line_by_impedance(net, index=None, sn_mva=None, only_valid_replace=True): @@ -1180,6 +1168,13 @@ def replace_line_by_impedance(net, index=None, sn_mva=None, only_valid_replace=T _replace_group_member_element_type(net, index, "line", new_index, "impedance", detach_from_gr=False) drop_lines(net, index) + + # --- result data + _adapt_result_tables_in_replace_functions(net, "line", index, "impedance", new_index) + + # --- adapt profiles + _adapt_profiles_in_replace_functions(net, "line", index, "impedance", new_index) + return new_index @@ -1264,12 +1259,11 @@ def replace_ext_grid_by_gen(net, ext_grids=None, gen_indices=None, slack=False, new_idx, net[table]["element"].dtypes) # --- result data - if net.res_ext_grid.shape[0]: - in_res = pd.Series(ext_grids).isin(net["res_ext_grid"].index).values - to_add = net.res_ext_grid.loc[pd.Index(ext_grids)[in_res]] - to_add.index = pd.Index(new_idx)[in_res] - net.res_gen = pd.concat([net.res_gen, to_add], sort=True) - net.res_ext_grid = net.res_ext_grid.drop(pd.Index(ext_grids)[in_res]) + _adapt_result_tables_in_replace_functions(net, "ext_grid", ext_grids, "gen", new_idx) + + # --- adapt profiles + _adapt_profiles_in_replace_functions(net, "ext_grid", ext_grids, "gen", new_idx) + return new_idx @@ -1346,12 +1340,11 @@ def replace_gen_by_ext_grid(net, gens=None, ext_grid_indices=None, cols_to_keep= net[table].loc[to_change, "element"] = new_idx # --- result data - if net.res_gen.shape[0]: - in_res = pd.Series(gens).isin(net["res_gen"].index).values - to_add = net.res_gen.loc[pd.Index(gens)[in_res]] - to_add.index = pd.Index(new_idx)[in_res] - net.res_ext_grid = pd.concat([net.res_ext_grid, to_add], sort=True) - net.res_gen = 
net.res_gen.drop(pd.Index(gens)[in_res]) + _adapt_result_tables_in_replace_functions(net, "gen", gens, "ext_grid", new_idx) + + # --- adapt profiles + _adapt_profiles_in_replace_functions(net, "gen", gens, "ext_grid", new_idx) + return new_idx @@ -1431,12 +1424,11 @@ def replace_gen_by_sgen(net, gens=None, sgen_indices=None, cols_to_keep=None, new_idx, net[table]["element"].dtypes) # --- result data - if net.res_gen.shape[0]: - in_res = pd.Series(gens).isin(net["res_gen"].index).values - to_add = net.res_gen.loc[pd.Index(gens)[in_res]] - to_add.index = pd.Index(new_idx)[in_res] - net.res_sgen = pd.concat([net.res_sgen, to_add], sort=True) - net.res_gen = net.res_gen.drop(pd.Index(gens)[in_res]) + _adapt_result_tables_in_replace_functions(net, "gen", gens, "sgen", new_idx) + + # --- adapt profiles + _adapt_profiles_in_replace_functions(net, "gen", gens, "sgen", new_idx) + return new_idx @@ -1531,13 +1523,12 @@ def replace_sgen_by_gen(net, sgens=None, gen_indices=None, cols_to_keep=None, net[table].loc[to_change, "et"] = "gen" net[table].loc[to_change, "element"] = new_idx - # --- result data - if net.res_sgen.shape[0]: - in_res = pd.Series(sgens).isin(net["res_sgen"].index).values - to_add = net.res_sgen.loc[pd.Index(sgens)[in_res]] - to_add.index = pd.Index(new_idx)[in_res] - net.res_gen = pd.concat([net.res_gen, to_add], sort=True) - net.res_sgen = net.res_sgen.drop(pd.Index(sgens)[in_res]) + # --- adapt result data + _adapt_result_tables_in_replace_functions(net, "sgen", sgens, "gen", new_idx) + + # --- adapt profiles + _adapt_profiles_in_replace_functions(net, "sgen", sgens, "gen", new_idx) + return new_idx @@ -1652,14 +1643,14 @@ def replace_pq_elmtype(net, old_element_type, new_element_type, old_indices=None net[table].loc[to_change, "element"] = np.array( new_idx, net[table]["element"].dtypes) - # --- result data - if net["res_" + old_element_type].shape[0]: - in_res = pd.Series(old_indices).isin(net["res_" + old_element_type].index).values - to_add = net["res_" + old_element_type].loc[pd.Index(old_indices)[in_res]] - to_add.index = pd.Index(new_idx)[in_res] - net["res_" + new_element_type] = pd.concat([net["res_" + new_element_type], to_add], sort=True) - net["res_" + old_element_type] = net["res_" + old_element_type].drop(pd.Index(old_indices)[in_res]) + _adapt_result_tables_in_replace_functions( + net, old_element_type, old_indices, new_element_type, new_idx) + + # --- adapt profiles + _adapt_profiles_in_replace_functions( + net, old_element_type, old_indices, new_element_type, new_idx) + return new_idx @@ -1722,8 +1713,7 @@ def replace_ward_by_internal_elements(net, wards=None, log_level="warning"): drop_elements_simple(net, "ward", wards) -def replace_xward_by_internal_elements(net, xwards=None, set_xward_bus_limits=False, - log_level="warning"): +def replace_xward_by_internal_elements(net, xwards=None, set_xward_bus_limits=False): """ Replaces xward by loads, shunts, impedance and generators @@ -1735,9 +1725,6 @@ def replace_xward_by_internal_elements(net, xwards=None, set_xward_bus_limits=Fa indices of xwards which should be replaced. If None, all xwards are replaced, by default None set_xward_bus_limits : bool, optional if True, the buses internal in xwards get vm limits from the connected buses - log_level : str, optional - logging level of the message which element types of net2 got reindexed elements. 
Options - are, for example "debug", "info", "warning", "error", or None, by default "info" Returns ------- @@ -1775,7 +1762,7 @@ def replace_xward_by_internal_elements(net, xwards=None, set_xward_bus_limits=Fa # --- result data if net.res_xward.shape[0]: log_to_level("Implementations to move xward results to new internal elements are missing.", - logger, log_level) + logger, "info") net.res_xward = net.res_xward.drop(xwards) # --- drop replaced wards @@ -1813,6 +1800,9 @@ def replace_xward_by_ward(net, index=None, drop=True): The function ensures that the group membership and associated element type of the replaced elements are updated accordingly. """ + # TODO: parameter `drop` is implemented only to this replace function. needed if yes why not + # implementing at the other replace functions? + index = list(ensure_iterability(index)) if index is not None else list(net.impedance.index) new_index = [] @@ -1829,3 +1819,56 @@ def replace_xward_by_ward(net, index=None, drop=True): else: net.xward.loc[index, "in_service"] = False return new_index + + +def _replace_group_member_element_type( + net, old_elements, old_element_type, new_elements, new_element_type, detach_from_gr=True): + assert not isinstance(old_element_type, set) + assert not isinstance(new_element_type, set) + old_elements = pd.Series(old_elements) + new_elements = pd.Series(new_elements) + + check_unique_group_rows(net) + gr_et = net.group.loc[net.group.element_type == old_element_type] + for gr_index in gr_et.index: + isin = old_elements.isin(gr_et.at[gr_index, "element_index"]) + if any(isin): + attach_to_group(net, gr_index, new_element_type, [new_elements.loc[isin].tolist()], + reference_columns=gr_et.at[gr_index, "reference_column"]) + if detach_from_gr: + detach_from_groups(net, old_element_type, old_elements) # sometimes done afterwarts when + # dropping the old elements + + +def _adapt_result_tables_in_replace_functions( + net, element_type_old, element_index_old, element_type_new, element_index_new): + et_old, et_new = "res_" + element_type_old, "res_" + element_type_new + idx_old, idx_new = pd.Index(element_index_old), pd.Index(element_index_new) + if net[et_old].shape[0]: + in_res = pd.Series(idx_old).isin(net[et_old].index).values + to_add = net[et_old].loc[idx_old[in_res]] + to_add.index = idx_new[in_res] + net[et_new] = pd.concat([net[et_new], to_add], sort=True) + net[et_old] = net[et_old].drop(idx_old[in_res]) + + +def _adapt_profiles_in_replace_functions( + net, element_type_old, element_index_old, element_type_new, element_index_new + ): + if "profiles" not in net or not isinstance(net.profiles, dict): + return + et_old, et_new = element_type_old, element_type_new + idx_old, idx_new = pd.Index(element_index_old), pd.Index(element_index_new) + + keys_old = [key for key in net.profiles.keys() if ( + key.startswith(f"{et_old}.") or key.startswith(f"res_{et_old}."))] + for key_old in keys_old: + key_new = key_old.replace(et_old, et_new) + in_prof = pd.Series(idx_old).isin(net.profiles[key_old].columns).values + to_add = net.profiles[key_old].loc[:, idx_old[in_prof]] + to_add.columns = idx_new[in_prof] + if key_new in net.profiles.keys(): + net.profiles[key_new] = pd.concat([net.profiles[key_new], to_add], sort=True) + else: + net.profiles[key_new] = to_add + net.profiles[key_old] = net.profiles[key_old].drop(idx_old[in_prof], axis=1) diff --git a/pyproject.toml b/pyproject.toml index 5d19789f9..0d67c6038 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" 
[project] name = "pandapower" -version = "3.0.0.dev0" # File format version '__format_version__' is tracked in _version.py +version = "3.0.0.dev1" # File format version '__format_version__' is tracked in _version.py authors = [ { name = "Leon Thurner", email = "leon.thurner@retoflow.de" }, { name = "Alexander Scheidler", email = "alexander.scheidler@iee.fraunhofer.de" }
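
Illustrative usage sketch (hypothetical, not part of the patch above) for the profile handling that the reworked toolbox replace functions gain via _adapt_profiles_in_replace_functions. It assumes the net.profiles convention of "<element_type>.<variable>" keys holding DataFrames with element indices as columns, which is the layout the new helper iterates over; the network and profile values are made up for the example:

    import pandas as pd
    import pandapower as pp

    # hypothetical example net with a single gen and an attached time series profile
    net = pp.create_empty_network()
    b = pp.create_bus(net, vn_kv=20.)
    pp.create_ext_grid(net, b)
    gen_idx = pp.create_gen(net, b, p_mw=5., vm_pu=1.02)

    # profile stored under the "<element_type>.<variable>" key convention
    net.profiles = {"gen.p_mw": pd.DataFrame({gen_idx: [5., 6., 7.]})}

    # replace all gens by sgens; the reworked function also delegates to the
    # shared result-table and profile helpers introduced in this patch
    new_idx = pp.replace_gen_by_sgen(net)

    # the profile columns are expected to move to a "sgen.p_mw" key, re-indexed to
    # the new sgen indices, while the old "gen.p_mw" frame loses the replaced columns
    print(net.profiles["sgen.p_mw"])
    print(list(net.profiles["gen.p_mw"].columns))  # -> []

The same behaviour should apply to the other replace functions touched here (replace_ext_grid_by_gen, replace_gen_by_ext_grid, replace_sgen_by_gen, replace_pq_elmtype, replace_impedance_by_line, replace_line_by_impedance), since they all now delegate to the shared _adapt_result_tables_in_replace_functions and _adapt_profiles_in_replace_functions helpers.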