Merge pull request #461 from PyPSA/pre-commit-ci-update-config

[pre-commit.ci] pre-commit autoupdate
Fabian Neumann 2023-01-24 07:58:23 +01:00 committed by GitHub
commit b579f8c5ff
9 changed files with 14 additions and 16 deletions


@@ -39,7 +39,7 @@ repos:
 # Make docstrings PEP 257 compliant
 - repo: https://github.com/PyCQA/docformatter
-  rev: v1.5.1
+  rev: v1.6.0.rc1
   hooks:
   - id: docformatter
     args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
@@ -67,7 +67,7 @@ repos:
 # Do YAML formatting (before the linter checks it for misses)
 - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-  rev: v2.5.0
+  rev: v2.6.0
   hooks:
   - id: pretty-format-yaml
     args: [--autofix, --indent, "2", --preserve-quotes]
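
For context, the docformatter hook enforces the docstring layout visible in the Python hunks below: --pre-summary-newline moves the summary off the opening quotes onto its own line, and --make-summary-multi-line keeps the closing quotes on a separate line. A minimal sketch of the resulting style (the function name is illustrative; exact wrapping depends on the docformatter release bumped here):

def example():
    """
    One-line summary placed on its own line after the opening quotes.

    Longer description text, wrapped by docformatter; the closing quotes
    stay on a separate line.
    """
    return None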


@@ -28,7 +28,6 @@ def configure_logging(snakemake, skip_handlers=False):
     skip_handlers : True | False (default)
         Do (not) skip the default handlers created for redirecting output to STDERR and file.
     """
-
     import logging

     kwargs = snakemake.config.get("logging", dict()).copy()


@@ -111,7 +111,6 @@ def calculate_annuity(n, r):
     discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6
     """
-
     if isinstance(r, pd.Series):
         return pd.Series(1 / n, index=r.index).where(
             r == 0, r / (1.0 - 1.0 / (1.0 + r) ** n)
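
A quick arithmetic check of the docstring example above (plain Python, independent of the code being diffed): with the annuity factor a = r / (1 - (1 + r)**-n), and a = 1/n when r = 0, the quoted value of roughly 1.6 follows:

n, r = 20, 0.05
a = 1 / n if r == 0 else r / (1.0 - 1.0 / (1.0 + r) ** n)
print(round(a * n, 2))  # 1.6, matching "annuity(20, 0.05) * 20 = 1.6"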


@@ -69,7 +69,6 @@ def voronoi_partition_pts(points, outline):
     -------
     polygons : N - ndarray[dtype=Polygon|MultiPolygon]
     """
-
     points = np.asarray(points)

     if len(points) == 1:
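
The idea behind voronoi_partition_pts, sketched here with shapely as an assumption (this is not necessarily how the function itself is implemented): build Voronoi cells around the points and clip them to the outline polygon.

import numpy as np
from shapely.geometry import MultiPoint, box
from shapely.ops import voronoi_diagram

outline = box(0.0, 0.0, 1.0, 1.0)                        # illustrative outline polygon
points = np.array([[0.2, 0.2], [0.8, 0.3], [0.5, 0.8]])  # illustrative bus coordinates
cells = voronoi_diagram(MultiPoint(points), envelope=outline)
polygons = [cell.intersection(outline) for cell in cells.geoms]  # clip each cell to the outline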


@@ -189,7 +189,6 @@ def manual_adjustment(load, fn_load, powerstatistics):
         Manual adjusted and interpolated load time-series with UTC
         timestamps x ISO-2 countries
     """
-
     if powerstatistics:
         if "MK" in load.columns:
             if "AL" not in load.columns or load.AL.isnull().values.all():


@@ -222,7 +222,6 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
     """
     Determine the number of clusters per country.
     """
-
     L = (
         n.loads_t.p_set.mean()
         .groupby(n.loads.bus)
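
The series L built above (mean load per bus, later grouped by country) feeds the apportionment of n_clusters across countries. A rough sketch of that idea with made-up numbers (the actual function solves a small optimisation via solver_name and honours focus_weights; this is only the proportional intuition):

import pandas as pd

load_per_country = pd.Series({"DE": 60.0, "FR": 50.0, "PL": 15.0})  # illustrative mean loads
n_clusters = 25
share = load_per_country / load_per_country.sum()
clusters = (share * n_clusters).round().clip(lower=1).astype(int)   # at least one cluster per country
print(clusters.to_dict())  # {'DE': 12, 'FR': 10, 'PL': 3}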


@@ -235,7 +235,6 @@ def calculate_supply(n, label, supply):
     calculate the max dispatch of each component at the buses where the loads
     are attached.
     """
-
     load_types = n.buses.carrier.unique()

     for i in load_types:
@@ -296,7 +295,6 @@ def calculate_supply_energy(n, label, supply_energy):
     calculate the total dispatch of each component at the buses where the loads
     are attached.
     """
-
     load_types = n.buses.carrier.unique()

     for i in load_types:


@@ -111,11 +111,15 @@ def simplify_network_to_380(n):
     """
     Fix all lines to a voltage level of 380 kV and remove all transformers.
-    The function preserves the transmission capacity for each line while updating
-    its voltage level, line type and number of parallel bundles (num_parallel).
+    The function preserves the transmission capacity for each line while
+    updating
+    its voltage level, line type and number of parallel bundles
+    (num_parallel).
-    Transformers are removed and connected components are moved from their
-    starting bus to their ending bus. The corresponding starting buses are
+    Transformers are removed and connected components are moved from
+    their
+    starting bus to their ending bus. The corresponding starting buses
+    are
     removed as well.
     """
     logger.info("Mapping all network lines onto a single 380kV layer")
@@ -395,8 +399,10 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
 def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
     logger.info("Removing stubs")
-    across_borders = config["clustering"]["simplify_network"].get("remove_stubs_across_borders", True)
-    matching_attrs = [] if across_borders else ['country']
+    across_borders = config["clustering"]["simplify_network"].get(
+        "remove_stubs_across_borders", True
+    )
+    matching_attrs = [] if across_borders else ["country"]
     busmap = busmap_by_stubs(n, matching_attrs)
     connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)


@@ -321,7 +321,6 @@ def add_operational_reserve_margin(n, sns, config):
     Build reserve margin constraints based on the formulation given in
     https://genxproject.github.io/GenX/dev/core/#Reserves.
     """
-
     define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])

     add_operational_reserve_margin_constraint(n, config)
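
For orientation, the GenX-style requirement behind the reserve variable r defined above can be stated roughly as follows (my notation, not copied from the code: D_t is demand, \bar{p}_{g,t} the per-unit availability and P^{nom}_g the capacity of generator g, and the two epsilons are the reserve fractions for load and variable renewable feed-in):

    \sum_g r_{g,t} \ge \epsilon_{load} \, D_t + \epsilon_{VRES} \sum_{g \in VRES} \bar{p}_{g,t} \, P^{nom}_g \qquad \forall t \in sns

with each generator's reserve additionally bounded by its spare capacity, r_{g,t} + p_{g,t} \le \bar{p}_{g,t} P^{nom}_g.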