Merge branch 'master' into fix/deprecation-warning-pandas-1.4

Fabian Hofmann, 2023-02-08 10:06:43 +01:00, committed by GitHub
commit 6a2366ecac
8 changed files with 7 additions and 25 deletions

View File

@@ -17,7 +17,7 @@ repos:
 # Sort package imports alphabetically
 - repo: https://github.com/PyCQA/isort
-  rev: 5.11.4
+  rev: 5.12.0
   hooks:
   - id: isort
     args: ["--profile", "black", "--filter-files"]
@@ -51,7 +51,7 @@ repos:
 # Formatting with "black" coding style
 - repo: https://github.com/psf/black
-  rev: 22.12.0
+  rev: 23.1.0
   hooks:
   # Format Python files
   - id: black
@@ -74,7 +74,7 @@ repos:
 # Format Snakemake rule / workflow files
 - repo: https://github.com/snakemake/snakefmt
-  rev: v0.8.0
+  rev: v0.8.1
   hooks:
   - id: snakefmt

View File

@@ -394,12 +394,10 @@ def attach_conventional_generators(
     )
     for carrier in conventional_config:
         # Generators with technology affected
         idx = n.generators.query("carrier == @carrier").index
         for attr in list(set(conventional_config[carrier]) & set(n.generators)):
             values = conventional_config[carrier][attr]
             if f"conventional_{carrier}_{attr}" in conventional_inputs:
@@ -498,7 +496,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
         e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6
         e_installed = hydro.eval("p_nom * max_hours").groupby(hydro.country).sum()
         e_missing = e_target - e_installed
-        missing_mh_i = hydro.query("max_hours == 0").index
+        missing_mh_i = hydro.query("max_hours.isnull()").index
         if hydro_max_hours == "energy_capacity_totals_by_country":
             # watch out some p_nom values like IE's are totally underrepresented
@@ -511,6 +509,8 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
             hydro_stats["E_store[TWh]"] * 1e3 / hydro_stats["p_nom_discharge[GW]"]
         )
         max_hours_country.clip(0, inplace=True)
         missing_countries = pd.Index(hydro["country"].unique()).difference(
             max_hours_country.dropna().index
         )
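Note on the missing_mh_i change above: a zero comparison never matches NaN, so reservoir plants whose max_hours value is genuinely missing slip through it, which appears to be what the switch to a null check addresses. A minimal pandas-only sketch with toy data (not taken from the repository; the new code calls .isnull() inside the query expression, the boolean mask below is the same check spelled out):

import numpy as np
import pandas as pd

# Toy stand-in for the `hydro` frame; the values are purely illustrative.
hydro = pd.DataFrame({"max_hours": [6.0, np.nan, 0.0]}, index=["a", "b", "c"])

# The zero comparison only picks up explicit zeros and silently skips NaN rows.
zero_mh_i = hydro.query("max_hours == 0").index           # Index(['c'])

# The null check selects the genuinely missing entries instead.
missing_mh_i = hydro[hydro["max_hours"].isnull()].index   # Index(['b'])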

View File

@@ -694,7 +694,6 @@ def base_network(
     parameter_corrections,
     config,
 ):
     buses = _load_buses_from_eg(eg_buses, europe_shape, config["electricity"])
     links = _load_links_from_eg(buses, eg_links)

View File

@@ -268,7 +268,6 @@ def manual_adjustment(load, fn_load, powerstatistics):
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake
View File

@@ -80,7 +80,6 @@ import pandas as pd
 import pycountry as pyc
 from _helpers import configure_logging
 from shapely.geometry import MultiPolygon, Polygon
-from shapely.ops import unary_union
 logger = logging.getLogger(__name__)
@@ -158,8 +157,7 @@ def country_cover(country_shapes, eez_shapes=None):
     shapes = country_shapes
     if eez_shapes is not None:
         shapes = pd.concat([shapes, eez_shapes])
-    europe_shape = unary_union(shapes)
+    europe_shape = shapes.unary_union
     if isinstance(europe_shape, MultiPolygon):
         europe_shape = max(europe_shape, key=attrgetter("area"))
     return Polygon(shell=europe_shape.exterior)
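Note on the country_cover hunk above: dropping the shapely.ops import works because a GeoSeries exposes the same dissolve operation as a property. A minimal sketch, assuming geopandas and shapely are installed; the two rectangles are invented placeholders for country/EEZ shapes:

import geopandas as gpd
from shapely.geometry import box
from shapely.ops import unary_union

# Two overlapping rectangles standing in for country and EEZ polygons.
shapes = gpd.GeoSeries([box(0, 0, 1, 1), box(0.5, 0, 1.5, 1)])

merged_fn = unary_union(list(shapes))  # shapely.ops function over the geometries
merged_attr = shapes.unary_union       # GeoSeries property, no extra import needed

assert merged_fn.equals(merged_attr)   # both give the same dissolved polygon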

View File

@@ -238,7 +238,6 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
     ), f"Number of clusters must be {len(N)} <= n_clusters <= {N.sum()} for this selection of countries."
     if focus_weights is not None:
         total_focus = sum(list(focus_weights.values()))
         assert (
@@ -396,7 +395,6 @@ def clustering_for_n_clusters(
     extended_link_costs=0,
     focus_weights=None,
 ):
     bus_strategies, generator_strategies = get_aggregation_strategies(
         aggregation_strategies
     )
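Note on the distribute_clusters hunk above: the bound quoted in the assertion message follows from each group of buses (per country, and likely also per sub-network in the full function) receiving at least one cluster and at most one cluster per bus, so for per-group bus counts N the feasible range is len(N) <= n_clusters <= N.sum(). A tiny illustrative calculation with invented numbers:

import pandas as pd

# Hypothetical bus counts per country.
N = pd.Series({"DE": 5, "FR": 3, "PL": 2})

lower = len(N)   # 3: at least one cluster per country
upper = N.sum()  # 10: at most one cluster per bus
assert lower <= 6 <= upper  # e.g. n_clusters = 6 is feasible for this selection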

View File

@@ -164,7 +164,6 @@ def calculate_curtailment(n, label, curtailment):
 def calculate_energy(n, label, energy):
     for c in n.iterate_components(n.one_port_components | n.branch_components):
         if c.name in {"Generator", "Load", "ShuntImpedance"}:
             c_energies = (
                 c.pnl.p.multiply(n.snapshot_weightings.generators, axis=0)
@@ -238,7 +237,6 @@ def calculate_supply(n, label, supply):
     load_types = n.buses.carrier.unique()
     for i in load_types:
         buses = n.buses.query("carrier == @i").index
         bus_map = pd.Series(False, index=n.buses.index)
@@ -246,7 +244,6 @@ def calculate_supply(n, label, supply):
         bus_map.loc[buses] = True
         for c in n.iterate_components(n.one_port_components):
             items = c.df.index[c.df.bus.map(bus_map)]
             if len(items) == 0 or c.pnl.p.empty:
@@ -267,9 +264,7 @@ def calculate_supply(n, label, supply):
             supply.loc[idx[raw_index], label] = s.values
         for c in n.iterate_components(n.branch_components):
             for end in ["0", "1"]:
                 items = c.df.index[c.df["bus" + end].map(bus_map)]
                 if len(items) == 0 or c.pnl["p" + end].empty:
@@ -298,7 +293,6 @@ def calculate_supply_energy(n, label, supply_energy):
     load_types = n.buses.carrier.unique()
     for i in load_types:
         buses = n.buses.query("carrier == @i").index
         bus_map = pd.Series(False, index=n.buses.index)
@@ -306,7 +300,6 @@ def calculate_supply_energy(n, label, supply_energy):
         bus_map.loc[buses] = True
         for c in n.iterate_components(n.one_port_components):
             items = c.df.index[c.df.bus.map(bus_map)]
             if len(items) == 0 or c.pnl.p.empty:
@@ -327,9 +320,7 @@ def calculate_supply_energy(n, label, supply_energy):
             supply_energy.loc[idx[raw_index], label] = s.values
         for c in n.iterate_components(n.branch_components):
             for end in ["0", "1"]:
                 items = c.df.index[c.df["bus" + end].map(bus_map)]
                 if len(items) == 0 or c.pnl["p" + end].empty:
@@ -431,7 +422,6 @@ def calculate_weighted_prices(n, label, weighted_prices):
     }
     for carrier in link_loads:
         if carrier == "electricity":
             suffix = ""
         elif carrier[:5] == "space":
@@ -454,7 +444,6 @@ def calculate_weighted_prices(n, label, weighted_prices):
         load = n.loads_t.p_set[buses]
         for tech in link_loads[carrier]:
             names = n.links.index[n.links.index.to_series().str[-len(tech) :] == tech]
             if names.empty:
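Note on the calculate_supply hunks above: a pandas-only sketch (bus and component names are invented) of the bus_map pattern they use: mark the buses of one carrier in a boolean Series, then keep only the components whose bus column maps to True.

import pandas as pd

# Toy stand-ins for n.buses and a one-port component frame such as generators.
buses = pd.DataFrame({"carrier": ["AC", "AC", "gas"]}, index=["b1", "b2", "b3"])
component = pd.DataFrame({"bus": ["b1", "b3", "b2"]}, index=["g1", "g2", "g3"])

i = "AC"
selected = buses.query("carrier == @i").index         # buses of this carrier
bus_map = pd.Series(False, index=buses.index)
bus_map.loc[selected] = True                          # flag the selected buses

items = component.index[component.bus.map(bus_map)]   # Index(['g1', 'g3'])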

View File

@@ -324,7 +324,6 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
     )
     for lbl in labels.value_counts().loc[lambda s: s > 2].index:
         for b, buses, links in split_links(labels.index[labels == lbl]):
             if len(buses) <= 2:
                 continue
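Note on the simplify_links hunk above: the loop header uses value_counts() with a callable .loc filter to iterate only over labels occurring more than twice, i.e. connected groups with more than two buses, which, judging from the len(buses) <= 2 guard below it, are the only ones that need splitting. A short pandas sketch with invented labels:

import pandas as pd

# Hypothetical component label per bus.
labels = pd.Series({"b1": 0, "b2": 0, "b3": 0, "b4": 1, "b5": 1, "b6": 2})

counts = labels.value_counts()             # members per label: 0 -> 3, 1 -> 2, 2 -> 1
big = counts.loc[lambda s: s > 2].index    # labels with more than two buses: [0]

for lbl in big:
    members = labels.index[labels == lbl]  # Index(['b1', 'b2', 'b3'])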