[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
pre-commit-ci[bot] 2023-01-23 21:27:19 +00:00
parent db51a6eba0
commit 8bdba5653a
8 changed files with 12 additions and 14 deletions


@@ -28,7 +28,6 @@ def configure_logging(snakemake, skip_handlers=False):
skip_handlers : True | False (default)
    Do (not) skip the default handlers created for redirecting output to STDERR and file.
"""
import logging
kwargs = snakemake.config.get("logging", dict()).copy()
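For orientation on the hunk above: configure_logging copies an optional "logging" block from the Snakemake config into kwargs. A minimal usage sketch, assuming those keys end up in logging.basicConfig (the values are illustrative, not taken from the repository):

    # Illustrative "logging" block as read via snakemake.config.get("logging", dict());
    # forwarding these keys to logging.basicConfig is an assumption of this sketch.
    config = {
        "logging": {
            "level": "INFO",
            "format": "%(levelname)s:%(name)s:%(message)s",
        }
    }

    # In a Snakemake script one would then call, e.g.:
    #   configure_logging(snakemake)                      # install STDERR/file handlers
    #   configure_logging(snakemake, skip_handlers=True)  # keep existing handlers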


@@ -111,7 +111,6 @@ def calculate_annuity(n, r):
discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6
"""
if isinstance(r, pd.Series):
    return pd.Series(1 / n, index=r.index).where(
        r == 0, r / (1.0 - 1.0 / (1.0 + r) ** n)
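A quick numerical check of the docstring example above, using the same annuity formula for a scalar rate (plain Python, no pandas needed):

    n_years, r = 20, 0.05
    factor = r / (1.0 - 1.0 / (1.0 + r) ** n_years)  # annuity factor per unit of capital cost
    print(round(factor * n_years, 2))  # 1.6, matching annuity(20, 0.05) * 20
    # For r == 0 the factor degenerates to 1 / n_years, which is what the
    # pd.Series(...).where(r == 0, ...) branch handles for vectorised rates.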


@@ -69,7 +69,6 @@ def voronoi_partition_pts(points, outline):
-------
polygons : N - ndarray[dtype=Polygon|MultiPolygon]
"""
points = np.asarray(points)
if len(points) == 1:
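The idea behind voronoi_partition_pts is to give every point a Voronoi cell and clip the cells to the outline. A small sketch of that technique using shapely directly (not the project's implementation; assumes shapely >= 1.8):

    import numpy as np
    from shapely.geometry import MultiPoint, box
    from shapely.ops import voronoi_diagram

    points = np.array([[0.2, 0.2], [0.8, 0.3], [0.5, 0.8]])
    outline = box(0, 0, 1, 1)  # illustrative outline polygon

    cells = voronoi_diagram(MultiPoint(points), envelope=outline)
    polygons = [cell.intersection(outline) for cell in cells.geoms]
    # Note: cell order need not match the input point order.
    print([round(p.area, 3) for p in polygons])  # areas should sum to outline.area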


@@ -189,7 +189,6 @@ def manual_adjustment(load, fn_load, powerstatistics):
Manual adjusted and interpolated load time-series with UTC
timestamps x ISO-2 countries
"""
if powerstatistics:
    if "MK" in load.columns:
        if "AL" not in load.columns or load.AL.isnull().values.all():


@@ -222,7 +222,6 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
"""
Determine the number of clusters per country.
"""
L = (
    n.loads_t.p_set.mean()
    .groupby(n.loads.bus)
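The hunk above begins the load-weighting step of distribute_clusters: L is the mean load per bus, which the function later aggregates per country to split n_clusters. A minimal sketch of the proportional idea with naive rounding (the actual function solves a small integer optimisation, hence the solver_name argument, and can honour focus_weights):

    import pandas as pd

    country_load = pd.Series({"DE": 60.0, "FR": 50.0, "PL": 20.0})  # GW, illustrative
    n_clusters = 13

    alloc = (country_load / country_load.sum() * n_clusters).round().astype(int).clip(lower=1)
    alloc.iloc[0] += n_clusters - alloc.sum()  # crude fix-up so the total matches exactly
    print(alloc)  # e.g. DE 6, FR 5, PL 2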


@@ -235,7 +235,6 @@ def calculate_supply(n, label, supply):
calculate the max dispatch of each component at the buses where the loads
are attached.
"""
load_types = n.buses.carrier.unique()
for i in load_types:
@@ -296,7 +295,6 @@ def calculate_supply_energy(n, label, supply_energy):
calculate the total dispatch of each component at the buses where the loads
are attached.
"""
load_types = n.buses.carrier.unique()
for i in load_types:
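Both hunks above iterate over bus carriers to aggregate dispatch per component. The difference between the two functions is max versus sum over snapshots; a toy pandas illustration of that distinction, grouping by carrier only (the scripts additionally group by bus and component):

    import pandas as pd

    # Rows: snapshots, columns: generators (illustrative values in MW).
    p = pd.DataFrame(
        {"gas DE": [5.0, 7.0, 6.0], "wind DE": [3.0, 1.0, 4.0], "gas FR": [2.0, 2.5, 2.0]}
    )
    carrier = pd.Series({"gas DE": "gas", "wind DE": "wind", "gas FR": "gas"})

    print(p.max().groupby(carrier).sum())  # max dispatch, as in calculate_supply
    print(p.sum().groupby(carrier).sum())  # total dispatch, as in calculate_supply_energy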


@@ -111,11 +111,15 @@ def simplify_network_to_380(n):
"""
Fix all lines to a voltage level of 380 kV and remove all transformers.
- The function preserves the transmission capacity for each line while updating
- its voltage level, line type and number of parallel bundles (num_parallel).
- Transformers are removed and connected components are moved from their
- starting bus to their ending bus. The corresponding starting buses are
+ The function preserves the transmission capacity for each line while
+ updating
+ its voltage level, line type and number of parallel bundles
+ (num_parallel).
+ Transformers are removed and connected components are moved from
+ their
+ starting bus to their ending bus. The corresponding starting buses
+ are
removed as well.
"""
logger.info("Mapping all network lines onto a single 380kV layer")
@@ -395,8 +399,10 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
logger.info("Removing stubs")
- across_borders = config["clustering"]["simplify_network"].get("remove_stubs_across_borders", True)
- matching_attrs = [] if across_borders else ['country']
+ across_borders = config["clustering"]["simplify_network"].get(
+     "remove_stubs_across_borders", True
+ )
+ matching_attrs = [] if across_borders else ["country"]
busmap = busmap_by_stubs(n, matching_attrs)
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
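The re-wrapped .get() call reads an optional clustering flag; when it is False, stubs are only merged with buses in the same country. A self-contained sketch of how that flag flows into the busmap (the config structure mirrors the keys used above; the False value is just an example):

    config = {
        "clustering": {
            "simplify_network": {
                "remove_stubs_across_borders": False,
            }
        }
    }

    across_borders = config["clustering"]["simplify_network"].get(
        "remove_stubs_across_borders", True  # the code falls back to True if the key is absent
    )
    matching_attrs = [] if across_borders else ["country"]
    print(matching_attrs)  # ['country'] -> busmap_by_stubs only merges stubs within a country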


@@ -321,7 +321,6 @@ def add_operational_reserve_margin(n, sns, config):
Build reserve margin constraints based on the formulation given in
https://genxproject.github.io/GenX/dev/core/#Reserves.
"""
define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
add_operational_reserve_margin_constraint(n, config)
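For orientation, a hedged paraphrase of the GenX-style requirement that the new per-generator reserve variable r enters; the symbols and the two epsilon shares follow my reading of the linked formulation, not the script itself:

    \sum_i r_{i,t} \ge \varepsilon^{\mathrm{load}} D_t
        + \varepsilon^{\mathrm{VRES}} \sum_{i \in \mathrm{VRES}} \bar{p}_{i,t} \, P^{\mathrm{nom}}_i
        \quad \forall t
    r_{i,t} + p_{i,t} \le \bar{p}_{i,t} \, P^{\mathrm{nom}}_i \quad \forall i, t

Here D_t is the demand at snapshot t, \bar{p}_{i,t} the per-unit availability (p_max_pu), P^{\mathrm{nom}}_i the generator capacity, and p_{i,t} the dispatch; the second line simply caps reserve plus dispatch at available capacity.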