update version compatibility handling

Fabian Neumann 2024-02-09 13:59:15 +01:00
parent e0dafc50b3
commit 3691f9f4c1
7 changed files with 25 additions and 40 deletions

View File

@@ -45,7 +45,7 @@ if config["foresight"] != "perfect":
             (
                 LOGS
                 + "plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"
-            )
+            ),
         benchmark:
             (
                 BENCHMARKS
@@ -74,7 +74,7 @@ if config["foresight"] != "perfect":
             (
                 LOGS
                 + "plot_hydrogen_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"
-            )
+            ),
         benchmark:
             (
                 BENCHMARKS
@@ -102,7 +102,7 @@ if config["foresight"] != "perfect":
            (
                LOGS
                + "plot_gas_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"
-           )
+           ),
        benchmark:
            (
                BENCHMARKS

View File

@@ -264,7 +264,6 @@ def mock_snakemake(
     import os

     import snakemake as sm
-    from packaging.version import Version, parse
     from pypsa.descriptors import Dict
     from snakemake.script import Snakemake
@@ -290,13 +289,12 @@ def mock_snakemake(
         if os.path.exists(p):
             snakefile = p
             break

-    kwargs = (
-        dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
-    )
     if isinstance(configfiles, str):
         configfiles = [configfiles]
-    workflow = sm.Workflow(snakefile, overwrite_configfiles=configfiles, **kwargs)
+    workflow = sm.Workflow(
+        snakefile, overwrite_configfiles=configfiles, rerun_triggers=[]
+    )
     workflow.include(snakefile)

     if configfiles:
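
For context, a minimal sketch of the version-gating idiom that the hunk above drops: keyword arguments are collected in a dict only when the installed snakemake release accepts them, then unpacked into the call. With the supported snakemake versions now all newer than 7.7.0, rerun_triggers=[] can simply be passed directly.

from packaging.version import Version, parse

import snakemake as sm

# Old-style gate: only pass `rerun_triggers` to sm.Workflow when the installed
# snakemake is newer than 7.7.0; otherwise fall back to an empty kwargs dict.
kwargs = dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
print(kwargs)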

View File

@@ -78,10 +78,13 @@ import shapely.prepared
 import shapely.wkt
 import yaml
 from _helpers import configure_logging
+from packaging.version import Version, parse
 from scipy import spatial
 from scipy.sparse import csgraph
 from shapely.geometry import LineString, Point

+PD_GE_2_2 = parse(pd.__version__) >= Version("2.2")
+
 logger = logging.getLogger(__name__)
@@ -524,12 +527,13 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
         )
         return pd.Series(key, index)

+    compat_kws = dict(include_groups=False) if PD_GE_2_2 else {}
     gb = buses.loc[substation_b].groupby(
         ["x", "y"], as_index=False, group_keys=False, sort=False
     )
-    bus_map_low = gb.apply(prefer_voltage, "min", include_groups=False)
+    bus_map_low = gb.apply(prefer_voltage, "min", **compat_kws)
     lv_b = (bus_map_low == bus_map_low.index).reindex(buses.index, fill_value=False)
-    bus_map_high = gb.apply(prefer_voltage, "max", include_groups=False)
+    bus_map_high = gb.apply(prefer_voltage, "max", **compat_kws)
     hv_b = (bus_map_high == bus_map_high.index).reindex(buses.index, fill_value=False)

     onshore_b = pd.Series(False, buses.index)
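
The compat_kws gate added above works around a pandas API change: DataFrameGroupBy.apply only accepts the include_groups keyword from pandas 2.2 onwards, and older releases raise a TypeError for it. A minimal, self-contained sketch of the same pattern (the small DataFrame is made up for illustration):

import pandas as pd
from packaging.version import Version, parse

# Only pandas >= 2.2 understands `include_groups`; build the kwargs accordingly.
PD_GE_2_2 = parse(pd.__version__) >= Version("2.2")
compat_kws = dict(include_groups=False) if PD_GE_2_2 else {}

buses = pd.DataFrame({"x": [1.0, 1.0, 2.0], "v_nom": [380.0, 220.0, 300.0]})

# Highest nominal voltage per x coordinate; runs on both old and new pandas.
highest = buses.groupby("x").apply(lambda g: g["v_nom"].max(), **compat_kws)
print(highest)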

View File

@@ -13,7 +13,6 @@ from itertools import product
 import country_converter as coco
 import geopandas as gpd
 import pandas as pd
-from packaging.version import Version, parse

 logger = logging.getLogger(__name__)
 cc = coco.CountryConverter()
@@ -84,12 +83,7 @@ def prepare_hotmaps_database(regions):
     gdf = gpd.GeoDataFrame(df, geometry="coordinates", crs="EPSG:4326")

-    kws = (
-        dict(op="within")
-        if parse(gpd.__version__) < Version("0.10")
-        else dict(predicate="within")
-    )
-    gdf = gpd.sjoin(gdf, regions, how="inner", **kws)
+    gdf = gpd.sjoin(gdf, regions, how="inner", predicate="within")

     gdf.rename(columns={"index_right": "bus"}, inplace=True)
     gdf["country"] = gdf.bus.str[:2]

View File

@@ -10,7 +10,6 @@ import logging

 import geopandas as gpd
 import pandas as pd
-from packaging.version import Version, parse
 from pypsa.geo import haversine_pts
 from shapely import wkt

@@ -41,12 +40,9 @@ def build_clustered_gas_network(df, bus_regions, length_factor=1.25):
     for i in [0, 1]:
         gdf = gpd.GeoDataFrame(geometry=df[f"point{i}"], crs="EPSG:4326")

-        kws = (
-            dict(op="within")
-            if parse(gpd.__version__) < Version("0.10")
-            else dict(predicate="within")
-        )
-        bus_mapping = gpd.sjoin(gdf, bus_regions, how="left", **kws).index_right
+        bus_mapping = gpd.sjoin(
+            gdf, bus_regions, how="left", predicate="within"
+        ).index_right
         bus_mapping = bus_mapping.groupby(bus_mapping.index).first()

         df[f"bus{i}"] = bus_mapping

View File

@@ -135,6 +135,7 @@ import pypsa
 import seaborn as sns
 from _helpers import configure_logging, update_p_nom_max
 from add_electricity import load_costs
+from packaging.version import Version, parse
 from pypsa.clustering.spatial import (
     busmap_by_greedy_modularity,
     busmap_by_hac,
@@ -142,6 +143,8 @@ from pypsa.clustering.spatial import (
     get_clustering_from_busmap,
 )

+PD_GE_2_2 = parse(pd.__version__) >= Version("2.2")
+
 warnings.filterwarnings(action="ignore", category=UserWarning)
 idx = pd.IndexSlice
 logger = logging.getLogger(__name__)
@@ -362,9 +365,11 @@ def busmap_for_n_clusters(
             f"`algorithm` must be one of 'kmeans' or 'hac'. Is {algorithm}."
         )

+    compat_kws = dict(include_groups=False) if PD_GE_2_2 else {}
+
     return (
         n.buses.groupby(["country", "sub_network"], group_keys=False)
-        .apply(busmap_for_country, include_groups=False)
+        .apply(busmap_for_country, **compat_kws)
         .squeeze()
         .rename("busmap")
     )

View File

@@ -23,15 +23,12 @@ from add_electricity import calculate_annuity, sanitize_carriers, sanitize_locat
 from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
 from networkx.algorithms import complement
 from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
-from packaging.version import Version, parse
 from pypsa.geo import haversine_pts
 from pypsa.io import import_components_from_dataframe
 from scipy.stats import beta

 spatial = SimpleNamespace()
 logger = logging.getLogger(__name__)
-pd_version = parse(pd.__version__)
-agg_group_kwargs = dict(numeric_only=False) if pd_version >= Version("1.3") else {}


 def define_spatial(nodes, options):
@@ -1853,16 +1850,7 @@ def add_heat(n, costs):
             p_nom_extendable=True,
         )

-        if isinstance(options["tes_tau"], dict):
-            tes_time_constant_days = options["tes_tau"][name_type]
-        else:
-            logger.warning(
-                "Deprecated: a future version will require you to specify 'tes_tau' ",
-                "for 'decentral' and 'central' separately.",
-            )
-            tes_time_constant_days = (
-                options["tes_tau"] if name_type == "decentral" else 180.0
-            )
+        tes_time_constant_days = options["tes_tau"][name_type]

         n.madd(
             "Store",
@@ -3404,7 +3392,7 @@ def cluster_heat_buses(n):
     # cluster heat nodes
     # static dataframe
     agg = define_clustering(df.columns, aggregate_dict)
-    df = df.groupby(level=0).agg(agg, **agg_group_kwargs)
+    df = df.groupby(level=0).agg(agg, numeric_only=False)
     # time-varying data
     pnl = c.pnl
     agg = define_clustering(pd.Index(pnl.keys()), aggregate_dict)
@@ -3413,7 +3401,7 @@ def cluster_heat_buses(n):
         def renamer(s):
             return s.replace("residential ", "").replace("services ", "")

-        pnl[k] = pnl[k].T.groupby(renamer).agg(agg[k], **agg_group_kwargs).T
+        pnl[k] = pnl[k].T.groupby(renamer).agg(agg[k], numeric_only=False).T

     # remove unclustered assets of service/residential
     to_drop = c.df.index.difference(df.index)
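
Two shims disappear in this file: the pandas < 1.3 fallback that hid numeric_only=False behind agg_group_kwargs, and the deprecated scalar form of tes_tau with its warning. The options mapping must now provide the thermal energy storage time constant per heat system type, roughly as sketched here (the values are illustrative, not taken from the config):

# Hypothetical excerpt of the sector options in the dict form that the code
# above now requires; the scalar fallback for 'decentral'/'central' is gone.
options = {"tes_tau": {"decentral": 3, "central": 180}}

for name_type in ("decentral", "central"):
    tes_time_constant_days = options["tes_tau"][name_type]
    print(name_type, tes_time_constant_days)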