Merge branch 'master' into perfect-foresight-fixes
commit f0ec1d6716
.github/workflows/ci.yaml
@@ -81,11 +81,7 @@ jobs:
          key: data-cutouts-${{ env.WEEK }}-${{ env.DATA_CACHE_NUMBER }}

    - name: Test snakemake workflow
-      run: |
-        snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime
-        snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime
-        snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime
-        snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime
+      run: ./test.sh

    - name: Upload artifacts
      uses: actions/upload-artifact@v4.3.0
@@ -31,7 +31,12 @@ CDIR = RDIR if not run.get("shared_cutouts") else ""

 LOGS = "logs/" + RDIR
 BENCHMARKS = "benchmarks/" + RDIR
-RESOURCES = "resources/" + RDIR if not run.get("shared_resources") else "resources/"
+if not (shared_resources := run.get("shared_resources")):
+    RESOURCES = "resources/" + RDIR
+elif isinstance(shared_resources, str):
+    RESOURCES = "resources/" + shared_resources + "/"
+else:
+    RESOURCES = "resources/"
 RESULTS = "results/" + RDIR
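Note (not part of the diff): the new ``shared_resources`` handling above can be read as a small lookup rule. A minimal Python sketch, using a hypothetical ``resolve_resources`` helper and made-up run names that are not part of the commit:

# Illustrative sketch only; resolve_resources() is a hypothetical helper,
# not part of PyPSA-Eur. It mirrors the branching added to the Snakefile.
def resolve_resources(run_name: str, shared_resources) -> str:
    if not shared_resources:  # false or unset: keep resources per run
        return "resources/" + run_name + "/"
    elif isinstance(shared_resources, str):  # string: named shared folder
        return "resources/" + shared_resources + "/"
    else:  # true: one folder shared by all runs
        return "resources/"

# example run names are made up for illustration
assert resolve_resources("test-elec", False) == "resources/test-elec/"
assert resolve_resources("test-elec", "test") == "resources/test/"
assert resolve_resources("test-elec", True) == "resources/"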
@@ -8,14 +8,14 @@ tutorial: true
 run:
   name: "test-elec" # use this to keep track of runs with different settings
   disable_progressbar: true
-  shared_resources: true
+  shared_resources: "test"
   shared_cutouts: true

 scenario:
   clusters:
   - 5
   opts:
-  - Co2L-24H
+  - Co2L-24h

 countries: ['BE']
@@ -7,7 +7,7 @@ tutorial: true
 run:
   name: "test-sector-myopic"
   disable_progressbar: true
-  shared_resources: true
+  shared_resources: "test"
   shared_cutouts: true

 foresight: myopic

@@ -18,7 +18,7 @@ scenario:
   clusters:
   - 5
   sector_opts:
-  - 24H-T-H-B-I-A-dist1
+  - 24h-T-H-B-I-A-dist1
   planning_horizons:
   - 2030
   - 2040
@@ -7,7 +7,7 @@ tutorial: true
 run:
   name: "test-sector-overnight"
   disable_progressbar: true
-  shared_resources: true
+  shared_resources: "test"
   shared_cutouts: true

@@ -17,7 +17,7 @@ scenario:
   clusters:
   - 5
   sector_opts:
-  - CO2L0-24H-T-H-B-I-A-dist1
+  - CO2L0-24h-T-H-B-I-A-dist1
   planning_horizons:
   - 2030
@@ -7,7 +7,7 @@ tutorial: true
 run:
   name: "test-sector-perfect"
   disable_progressbar: true
-  shared_resources: true
+  shared_resources: "test"
   shared_cutouts: true

 foresight: perfect
@@ -66,6 +66,10 @@ Upcoming Release

 * Various minor bugfixes to the perfect foresight workflow, though perfect foresight must still be considered experimental.

+* It is now possible to determine the directory for shared resources by setting ``shared_resources`` to a string.
+
+* A ``test.sh`` script was added to the repository to run the tests locally.
+
 * Default settings for recycling rates and primary product shares of high-value
   chemicals have been set in accordance with the values used in `Neumann et al.
   (2023) <https://doi.org/10.1016/j.joule.2023.06.016>`_ linearly interpolated

@@ -74,6 +78,7 @@ Upcoming Release
   <https://static.agora-energiewende.de/fileadmin/Projekte/2021/2021_02_EU_CEAP/A-EW_254_Mobilising-circular-economy_study_WEB.pdf>`_.


 PyPSA-Eur 0.9.0 (5th January 2024)
 ==================================

@@ -54,6 +54,7 @@ if config["foresight"] != "perfect":
     rule plot_hydrogen_network:
         params:
             plotting=config["plotting"],
+            foresight=config["foresight"],
         input:
             network=RESULTS
             + "postnetworks/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.nc",
@@ -327,7 +327,9 @@ def attach_load(n, regions, load, nuts3_shapes, ua_md_gdp, countries, scaling=1.
         axis=1,
     )

-    n.madd("Load", substation_lv_i, bus=substation_lv_i, p_set=load)
+    n.madd(
+        "Load", substation_lv_i, bus=substation_lv_i, p_set=load
+    )  # carrier="electricity"


 def update_transmission_costs(n, costs, length_factor=1.0):
@@ -504,8 +506,8 @@ def attach_conventional_generators(
                 snakemake.input[f"conventional_{carrier}_{attr}"], index_col=0
             ).iloc[:, 0]
             bus_values = n.buses.country.map(values)
-            n.generators[attr].update(
-                n.generators.loc[idx].bus.map(bus_values).dropna()
+            n.generators.update(
+                {attr: n.generators.loc[idx].bus.map(bus_values).dropna()}
             )
         else:
             # Single value affecting all generators of technology k indiscriminantely of country
@@ -749,8 +751,8 @@ def attach_OPSD_renewables(n: pypsa.Network, tech_map: Dict[str, List[str]]) ->
         caps = caps.groupby(["bus"]).Capacity.sum()
         caps = caps / gens_per_bus.reindex(caps.index, fill_value=1)

-        n.generators.p_nom.update(gens.bus.map(caps).dropna())
-        n.generators.p_nom_min.update(gens.bus.map(caps).dropna())
+        n.generators.update({"p_nom": gens.bus.map(caps).dropna()})
+        n.generators.update({"p_nom_min": gens.bus.map(caps).dropna()})


 def estimate_renewable_capacities(
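Note (not part of the diff): the two hunks above swap column-level ``Series.update`` calls for ``DataFrame.update`` with a ``{column: values}`` mapping. A minimal pandas sketch of the pattern; the toy frame and column name are made up for illustration:

import pandas as pd

df = pd.DataFrame({"p_nom": [1.0, 2.0, 3.0]}, index=["a", "b", "c"])
new_values = pd.Series({"a": 10.0, "c": 30.0})

# old pattern (as removed above): update one column Series in place
# df.p_nom.update(new_values)

# new pattern (as added above): update the DataFrame from a dict-like,
# which aligns on the index and leaves missing labels untouched
df.update({"p_nom": new_values})

print(df["p_nom"].tolist())  # [10.0, 2.0, 30.0]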
@@ -48,7 +48,7 @@ def add_build_year_to_new_assets(n, baseyear):
             "series"
         ) & n.component_attrs[c.name].status.str.contains("Input")
         for attr in n.component_attrs[c.name].index[selection]:
-            c.pnl[attr].rename(columns=rename, inplace=True)
+            c.pnl[attr] = c.pnl[attr].rename(columns=rename)


 def add_existing_renewables(df_agg):
@@ -138,7 +138,9 @@ def _load_buses_from_eg(eg_buses, europe_shape, config_elec):
     )

     buses["carrier"] = buses.pop("dc").map({True: "DC", False: "AC"})
-    buses["under_construction"] = buses["under_construction"].fillna(False).astype(bool)
+    buses["under_construction"] = buses.under_construction.where(
+        lambda s: s.notnull(), False
+    ).astype(bool)

     # remove all buses outside of all countries including exclusive economic zones (offshore)
     europe_shape = gpd.read_file(europe_shape).loc[0, "geometry"]
@@ -525,9 +527,9 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
     gb = buses.loc[substation_b].groupby(
         ["x", "y"], as_index=False, group_keys=False, sort=False
     )
-    bus_map_low = gb.apply(prefer_voltage, "min")
+    bus_map_low = gb.apply(prefer_voltage, "min", include_groups=False)
     lv_b = (bus_map_low == bus_map_low.index).reindex(buses.index, fill_value=False)
-    bus_map_high = gb.apply(prefer_voltage, "max")
+    bus_map_high = gb.apply(prefer_voltage, "max", include_groups=False)
     hv_b = (bus_map_high == bus_map_high.index).reindex(buses.index, fill_value=False)

     onshore_b = pd.Series(False, buses.index)
@@ -132,14 +132,14 @@ def disaggregate_nuts0(bio):
     pop = build_nuts_population_data()

     # get population in nuts2
-    pop_nuts2 = pop.loc[pop.index.str.len() == 4]
+    pop_nuts2 = pop.loc[pop.index.str.len() == 4].copy()
     by_country = pop_nuts2.total.groupby(pop_nuts2.ct).sum()
-    pop_nuts2.loc[:, "fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country)
+    pop_nuts2["fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country)

     # distribute nuts0 data to nuts2 by population
     bio_nodal = bio.loc[pop_nuts2.ct]
     bio_nodal.index = pop_nuts2.index
-    bio_nodal = bio_nodal.mul(pop_nuts2.fraction, axis=0)
+    bio_nodal = bio_nodal.mul(pop_nuts2.fraction, axis=0).astype(float)

     # update inplace
     bio.update(bio_nodal)
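Note (not part of the diff): the ``.copy()`` added above makes ``pop_nuts2`` an independent frame, so adding the ``fraction`` column no longer writes into a slice of ``pop`` (which can raise pandas' SettingWithCopyWarning). A toy sketch with made-up population numbers:

import pandas as pd

pop = pd.DataFrame(
    {"ct": ["DE", "DE", "FR"], "total": [80.0, 3.0, 65.0]},
    index=["DE21", "DE22", "FR10"],
)

# without .copy(), pop_nuts2 may share data with pop and the column
# assignment below can trigger SettingWithCopyWarning
pop_nuts2 = pop.loc[pop.index.str.len() == 4].copy()

by_country = pop_nuts2.total.groupby(pop_nuts2.ct).sum()
pop_nuts2["fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country)

print(pop_nuts2["fraction"].round(3).tolist())  # [0.964, 0.036, 1.0]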
@@ -114,12 +114,10 @@ def prepare_dataset(
     df["p_nom_diameter"] = df.diameter_mm.apply(diameter_to_capacity)
     ratio = df.p_nom / df.p_nom_diameter
     not_nordstream = df.max_pressure_bar < 220
-    df.p_nom.update(
-        df.p_nom_diameter.where(
-            (df.p_nom <= 500)
-            | ((ratio > correction_threshold_p_nom) & not_nordstream)
-            | ((ratio < 1 / correction_threshold_p_nom) & not_nordstream)
-        )
-    )
+    df["p_nom"] = df.p_nom_diameter.where(
+        (df.p_nom <= 500)
+        | ((ratio > correction_threshold_p_nom) & not_nordstream)
+        | ((ratio < 1 / correction_threshold_p_nom) & not_nordstream)
+    )

     # lines which have way too discrepant line lengths
@@ -130,12 +128,10 @@ def prepare_dataset(
         axis=1,
     )
     ratio = df.eval("length / length_haversine")
-    df["length"].update(
-        df.length_haversine.where(
-            (df["length"] < 20)
-            | (ratio > correction_threshold_length)
-            | (ratio < 1 / correction_threshold_length)
-        )
-    )
+    df["length"] = df.length_haversine.where(
+        (df["length"] < 20)
+        | (ratio > correction_threshold_length)
+        | (ratio < 1 / correction_threshold_length)
+    )

     return df
@@ -98,7 +98,7 @@ def calculate_line_rating(n, cutout):
     -------
     xarray DataArray object with maximal power.
     """
-    relevant_lines = n.lines[~n.lines["underground"]]
+    relevant_lines = n.lines[~n.lines["underground"]].copy()
     buses = relevant_lines[["bus0", "bus1"]].values
     x = n.buses.x
     y = n.buses.y
@@ -83,7 +83,8 @@ if __name__ == "__main__":

         # correct for imprecision of Iinv*I
         pop_ct = nuts3.loc[nuts3.country == ct, "pop"].sum()
-        pop_cells_ct *= pop_ct / pop_cells_ct.sum()
+        if pop_cells_ct.sum() != 0:
+            pop_cells_ct *= pop_ct / pop_cells_ct.sum()

         # The first low density grid cells to reach rural fraction are rural
         asc_density_i = density_cells_ct.sort_values().index
@@ -297,8 +297,8 @@ def prepare_building_stock_data():
         errors="ignore",
     )

-    u_values.subsector.replace(rename_sectors, inplace=True)
-    u_values.btype.replace(rename_sectors, inplace=True)
+    u_values["subsector"] = u_values.subsector.replace(rename_sectors)
+    u_values["btype"] = u_values.btype.replace(rename_sectors)

     # for missing weighting of surfaces of building types assume MFH
     u_values["assumed_subsector"] = u_values.subsector

@@ -306,8 +306,8 @@ def prepare_building_stock_data():
         ~u_values.subsector.isin(rename_sectors.values()), "assumed_subsector"
     ] = "MFH"

-    u_values.country_code.replace({"UK": "GB"}, inplace=True)
-    u_values.bage.replace({"Berfore 1945": "Before 1945"}, inplace=True)
+    u_values["country_code"] = u_values.country_code.replace({"UK": "GB"})
+    u_values["bage"] = u_values.bage.replace({"Berfore 1945": "Before 1945"})
     u_values = u_values[~u_values.bage.isna()]

     u_values.set_index(["country_code", "subsector", "bage", "type"], inplace=True)
@@ -488,7 +488,9 @@ if __name__ == "__main__":
                 gens.efficiency, bins=[0, low, high, 1], labels=labels
             ).astype(str)
             carriers += [f"{c} {label} efficiency" for label in labels]
-            n.generators.carrier.update(gens.carrier + " " + suffix + " efficiency")
+            n.generators.update(
+                {"carrier": gens.carrier + " " + suffix + " efficiency"}
+            )
         aggregate_carriers = carriers

     if n_clusters == len(n.buses):
@@ -98,7 +98,7 @@ def plot_map(

         logger.debug(f"{comp}, {costs}")

-        costs = costs.groupby(costs.columns, axis=1).sum()
+        costs = costs.T.groupby(costs.columns).sum().T

         costs.drop(list(costs.columns[(costs == 0.0).all()]), axis=1, inplace=True)

@@ -269,8 +269,8 @@ def set_line_nom_max(
     hvdc = n.links.index[n.links.carrier == "DC"]
     n.links.loc[hvdc, "p_nom_max"] = n.links.loc[hvdc, "p_nom"] + p_nom_max_ext

-    n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True)
-    n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True)
+    n.lines["s_nom_max"] = n.lines.s_nom_max.clip(upper=s_nom_max_set)
+    n.links["p_nom_max"] = n.links.p_nom_max.clip(upper=p_nom_max_set)


 if __name__ == "__main__":
@@ -418,7 +418,7 @@ def add_CCL_constraints(n, config):
     Example
     -------
     scenario:
-        opts: [Co2L-CCL-24H]
+        opts: [Co2L-CCL-24h]
     electricity:
         agg_p_nom_limits: data/agg_p_nom_minmax.csv
     """
@@ -463,7 +463,7 @@ def add_EQ_constraints(n, o, scaling=1e-1):
     Example
     -------
     scenario:
-        opts: [Co2L-EQ0.7-24H]
+        opts: [Co2L-EQ0.7-24h]

     Require each country or node to on average produce a minimal share
     of its total electricity consumption itself. Example: EQ0.7c demands each country
@@ -527,7 +527,7 @@ def add_BAU_constraints(n, config):
     Example
     -------
     scenario:
-        opts: [Co2L-BAU-24H]
+        opts: [Co2L-BAU-24h]
     electricity:
         BAU_mincapacities:
             solar: 0
@@ -564,7 +564,7 @@ def add_SAFE_constraints(n, config):
     config.yaml requires to specify opts:

     scenario:
-        opts: [Co2L-SAFE-24H]
+        opts: [Co2L-SAFE-24h]
     electricity:
         SAFE_reservemargin: 0.1
     Which sets a reserve margin of 10% above the peak demand.
test.sh (new executable file)
@@ -0,0 +1,8 @@
+# SPDX-FileCopyrightText: : 2021-2024 The PyPSA-Eur Authors
+#
+# SPDX-License-Identifier: CC0-1.0
+
+snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime && \
+snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime && \
+snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime && \
+snakemake -call all --configfile config/test/config.perfect.yaml --rerun-triggers=mtime