Merge branch 'master' into ukraine_hackathon

This commit is contained in:
Fabian Neumann 2023-01-30 18:58:55 +01:00 committed by GitHub
commit 5bb35d188e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 25 additions and 24 deletions

View File

@@ -39,7 +39,7 @@ repos:
# Make docstrings PEP 257 compliant
- repo: https://github.com/PyCQA/docformatter
rev: v1.5.1
rev: v1.6.0.rc1
hooks:
- id: docformatter
args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
@@ -67,7 +67,7 @@ repos:
# Do YAML formatting (before the linter checks it for misses)
- repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
rev: v2.5.0
rev: v2.6.0
hooks:
- id: pretty-format-yaml
args: [--autofix, --indent, "2", --preserve-quotes]

View File

@@ -12,6 +12,8 @@ Upcoming Release
* Carriers of generators can now be excluded from aggregation in clustering network and simplify network.
* Fix EQ constraint for the case no hydro inflow is available
* Bugfix in the reserve constraint will increase demand related reserve requirements
PyPSA-Eur 0.6.1 (20th September 2022)

View File

@@ -28,7 +28,6 @@ def configure_logging(snakemake, skip_handlers=False):
skip_handlers : True | False (default)
Do (not) skip the default handlers created for redirecting output to STDERR and file.
"""
import logging
kwargs = snakemake.config.get("logging", dict()).copy()

View File

@@ -111,7 +111,6 @@ def calculate_annuity(n, r):
discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6
"""
if isinstance(r, pd.Series):
return pd.Series(1 / n, index=r.index).where(
r == 0, r / (1.0 - 1.0 / (1.0 + r) ** n)

View File

@@ -69,7 +69,6 @@ def voronoi_partition_pts(points, outline):
-------
polygons : N - ndarray[dtype=Polygon|MultiPolygon]
"""
points = np.asarray(points)
if len(points) == 1:

View File

@@ -189,7 +189,6 @@ def manual_adjustment(load, fn_load, powerstatistics, countries):
Manual adjusted and interpolated load time-series with UTC
timestamps x ISO-2 countries
"""
if powerstatistics:
if "MK" in load.columns:
if "AL" not in load.columns or load.AL.isnull().values.all():

View File

@@ -222,7 +222,6 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
"""
Determine the number of clusters per country.
"""
L = (
n.loads_t.p_set.mean()
.groupby(n.loads.bus)

View File

@@ -235,7 +235,6 @@ def calculate_supply(n, label, supply):
calculate the max dispatch of each component at the buses where the loads
are attached.
"""
load_types = n.buses.carrier.unique()
for i in load_types:
@@ -296,7 +295,6 @@ def calculate_supply_energy(n, label, supply_energy):
calculate the total dispatch of each component at the buses where the loads
are attached.
"""
load_types = n.buses.carrier.unique()
for i in load_types:

View File

@@ -111,11 +111,15 @@ def simplify_network_to_380(n):
"""
Fix all lines to a voltage level of 380 kV and remove all transformers.
The function preserves the transmission capacity for each line while updating
its voltage level, line type and number of parallel bundles (num_parallel).
The function preserves the transmission capacity for each line while
updating
its voltage level, line type and number of parallel bundles
(num_parallel).
Transformers are removed and connected components are moved from their
starting bus to their ending bus. The corresponding starting buses are
Transformers are removed and connected components are moved from
their
starting bus to their ending bus. The corresponding starting buses
are
removed as well.
"""
logger.info("Mapping all network lines onto a single 380kV layer")

View File

@@ -216,18 +216,21 @@ def add_EQ_constraints(n, o, scaling=1e-1):
.T.groupby(ggrouper, axis=1)
.apply(join_exprs)
)
lhs_spill = (
linexpr(
(
-n.snapshot_weightings.stores * scaling,
get_var(n, "StorageUnit", "spill").T,
if not n.storage_units_t.inflow.empty:
lhs_spill = (
linexpr(
(
-n.snapshot_weightings.stores * scaling,
get_var(n, "StorageUnit", "spill").T,
)
)
.T.groupby(sgrouper, axis=1)
.apply(join_exprs)
)
.T.groupby(sgrouper, axis=1)
.apply(join_exprs)
)
lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
lhs = lhs_gen + lhs_spill
lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
lhs = lhs_gen + lhs_spill
else:
lhs = lhs_gen
define_constraints(n, lhs, ">=", rhs, "equity", "min")
@@ -321,7 +324,6 @@ def add_operational_reserve_margin(n, sns, config):
Build reserve margin constraints based on the formulation given in
https://genxproject.github.io/GenX/dev/core/#Reserves.
"""
define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
add_operational_reserve_margin_constraint(n, config)