Merge pull request #825 from PyPSA/snakemake-8

Compatibility with `snakemake>=8`
Fabian Neumann, 2024-03-11 09:00:47 +01:00, committed by GitHub
commit 41e1dc5c82
12 changed files with 68 additions and 50 deletions
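The change repeated across the rule files below is the move from the snakemake.remote HTTP provider, which Snakemake 8 removed, to the storage() interface backed by snakemake-storage-plugin-http. A minimal before/after sketch of that pattern; the rule name and URL are illustrative and not taken from this diff:

# Snakemake < 8 (removed API): remote inputs via snakemake.remote
# from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
# HTTP = HTTPRemoteProvider()
#
# rule retrieve_example:
#     input:
#         HTTP.remote("https://example.com/data.csv", keep_local=True),
#     output:
#         "data/example.csv",

# Snakemake >= 8: storage() with the HTTP storage plugin installed
rule retrieve_example:
    input:
        storage("https://example.com/data.csv", keep_local=True),
    output:
        "data/example.csv",
    shell:
        "cp {input} {output}"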

View File

@@ -7,13 +7,11 @@ from shutil import copyfile, move, rmtree
from pathlib import Path
import yaml
from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
from snakemake.utils import min_version
from scripts._helpers import path_provider
min_version("8.5")
min_version("7.7")
HTTP = HTTPRemoteProvider()
from scripts._helpers import path_provider
default_files = {
"config/config.default.yaml": "config/config.yaml",

View File

@@ -813,8 +813,8 @@ solving:
cbc-default: {} # Used in CI
glpk-default: {} # Used in CI
mem: 30000 #memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
walltime: "12:00:00"
mem_mb: 30000 #memory in MB; 20 GB enough for 50+B+I+H2; 100 GB for 181+B+I+H2
runtime: 6h #runtime in humanfriendly style https://humanfriendly.readthedocs.io/en/latest/
# docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#plotting
plotting:
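Alongside the rename from mem to mem_mb, the free-form walltime string gives way to Snakemake's standard runtime resource, configured above in humanfriendly notation. A minimal sketch of a rule consuming such a value; the rule name and numbers are illustrative:

# Sketch only: Snakemake >= 8 accepts humanfriendly duration strings for the
# standard "runtime" resource (a plain integer is interpreted as minutes).
rule solve_example:
    output:
        "results/example.nc",
    resources:
        mem_mb=30000,
        runtime="6h",
    shell:
        "touch {output}"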

View File

@@ -10,6 +10,11 @@ Release Notes
Upcoming Release
================
* Upgrade to Snakemake v8.5+, which is now the minimum required version. To
upgrade an existing environment, run ``conda install -c bioconda
snakemake-minimal">=8.5"`` and ``pip install snakemake-storage-plugin-http``
(https://github.com/PyPSA/pypsa-eur/pull/825). See the verification sketch
after this list.
* Corrected a bug leading to power plants operating after their DateOut
(https://github.com/PyPSA/pypsa-eur/pull/958). Added additional grouping years
before 1980.
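For the Snakemake upgrade note above, a quick check that an existing environment now meets the requirements; this sketch is illustrative, not part of the PR, and assumes the widely available packaging library:

# Illustrative check only: confirm snakemake >= 8.5 and the HTTP storage plugin.
from importlib.metadata import version

import snakemake
from packaging.version import Version

assert Version(snakemake.__version__) >= Version("8.5"), "snakemake >= 8.5 required"
print(version("snakemake-storage-plugin-http"))  # PackageNotFoundError if missing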

View File

@@ -20,8 +20,7 @@ dependencies:
- openpyxl!=3.1.1
- pycountry
- seaborn
# snakemake 8 introduced a number of breaking changes which the workflow has yet to be made compatible with
- snakemake-minimal>=7.7.0,<8.0.0
- snakemake-minimal>=8.5
- memory_profiler
- yaml
- pytables
@@ -61,4 +60,7 @@ dependencies:
- pip:
- tsam>=2.3.1
- snakemake-storage-plugin-http
- snakemake-executor-plugin-slurm
- snakemake-executor-plugin-cluster-generic
- highspy

View File

@@ -8,6 +8,11 @@ channels:
- bioconda
dependencies:
- python>=3.8
- snakemake-minimal>=7.7.0,<8.0.0
- pip
- snakemake-minimal>=8.5
- pandas>=2.1
- tqdm
- pip:
- snakemake-storage-plugin-http
- snakemake-executor-plugin-slurm
- snakemake-executor-plugin-cluster-generic

View File

@@ -93,7 +93,7 @@ rule build_gas_network:
rule build_gas_input_locations:
input:
gem=HTTP.remote(
gem=storage(
"https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx",
keep_local=True,
),
@@ -293,7 +293,7 @@ rule build_biomass_potentials:
params:
biomass=config_provider("biomass"),
input:
enspreso_biomass=HTTP.remote(
enspreso_biomass=storage(
"https://zenodo.org/records/10356004/files/ENSPRESO_BIOMASS.xlsx",
keep_local=True,
),
@@ -325,8 +325,8 @@ rule build_biomass_potentials:
rule build_biomass_transport_costs:
input:
transport_cost_data=HTTP.remote(
"publications.jrc.ec.europa.eu/repository/bitstream/JRC98626/biomass potentials in europe_web rev.pdf",
transport_cost_data=storage(
"https://publications.jrc.ec.europa.eu/repository/bitstream/JRC98626/biomass potentials in europe_web rev.pdf",
keep_local=True,
),
output:
@@ -350,7 +350,7 @@ rule build_sequestration_potentials:
"sector", "regional_co2_sequestration_potential"
),
input:
sequestration_potential=HTTP.remote(
sequestration_potential=storage(
"https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson",
keep_local=True,
),

View File

@@ -64,9 +64,8 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True
rule retrieve_cutout:
input:
HTTP.remote(
"zenodo.org/record/6382570/files/{cutout}.nc",
static=True,
storage(
"https://zenodo.org/record/6382570/files/{cutout}.nc",
),
output:
protected("cutouts/" + CDIR + "{cutout}.nc"),
@@ -104,10 +103,9 @@ if config["enable"]["retrieve"] and config["enable"].get(
rule retrieve_natura_raster:
input:
HTTP.remote(
"zenodo.org/record/4706686/files/natura.tiff",
storage(
"https://zenodo.org/record/4706686/files/natura.tiff",
keep_local=True,
static=True,
),
output:
resources("natura.tiff"),
@@ -200,10 +198,9 @@ if config["enable"]["retrieve"]:
rule retrieve_ship_raster:
input:
HTTP.remote(
storage(
"https://zenodo.org/record/6953563/files/shipdensity_global.zip",
keep_local=True,
static=True,
),
output:
protected("data/shipdensity_global.zip"),
@@ -223,9 +220,8 @@ if config["enable"]["retrieve"]:
# Website: https://land.copernicus.eu/global/products/lc
rule download_copernicus_land_cover:
input:
HTTP.remote(
"zenodo.org/record/3939050/files/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
static=True,
storage(
"https://zenodo.org/record/3939050/files/PROBAV_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
),
output:
"data/Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
@@ -240,9 +236,8 @@ if config["enable"]["retrieve"]:
# Website: https://ec.europa.eu/jrc/en/luisa
rule retrieve_luisa_land_cover:
input:
HTTP.remote(
"jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif",
static=True,
storage(
"https://jeodpp.jrc.ec.europa.eu/ftp/jrc-opendata/LUISA/EUROPE/Basemaps/LandUse/2018/LATEST/LUISA_basemap_020321_50m.tif",
),
output:
"data/LUISA_basemap_020321_50m.tif",
@@ -285,11 +280,7 @@ if config["enable"]["retrieve"]:
# Website: https://www.protectedplanet.net/en/thematic-areas/wdpa
rule download_wdpa:
input:
HTTP.remote(
url,
static=True,
keep_local=True,
),
storage(url, keep_local=True),
params:
zip="data/WDPA_shp.zip",
folder=directory("data/WDPA"),
@@ -311,9 +302,8 @@ if config["enable"]["retrieve"]:
# extract the main zip and then merge the contained 3 zipped shapefiles
# Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
input:
HTTP.remote(
f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",
static=True,
storage(
f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",
keep_local=True,
),
params:
@@ -336,10 +326,9 @@ if config["enable"]["retrieve"]:
rule retrieve_monthly_co2_prices:
input:
HTTP.remote(
storage(
"https://www.eex.com/fileadmin/EEX/Downloads/EUA_Emission_Spot_Primary_Market_Auction_Report/Archive_Reports/emission-spot-primary-market-auction-report-2019-data.xls",
keep_local=True,
static=True,
),
output:
"data/validation/emission-spot-primary-market-auction-report-2019-data.xls",

View File

@@ -29,7 +29,7 @@ rule solve_network:
threads: solver_threads
resources:
mem_mb=memory,
walltime=config_provider("solving", "walltime", default="12:00:00"),
runtime=config_provider("solving", "runtime", default="6h"),
shadow:
"minimal"
conda:
@@ -60,7 +60,7 @@ rule solve_operations_network:
threads: 4
resources:
mem_mb=(lambda w: 10000 + 372 * int(w.clusters)),
walltime=config_provider("solving", "walltime", default="12:00:00"),
runtime=config_provider("solving", "runtime", default="6h"),
shadow:
"minimal"
conda:

View File

@@ -132,8 +132,8 @@ rule solve_sector_network_myopic:
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
threads: solver_threads
resources:
mem_mb=config_provider("solving", "mem"),
walltime=config_provider("solving", "walltime", default="12:00:00"),
mem_mb=config_provider("solving", "mem_mb"),
runtime=config_provider("solving", "runtime", default="6h"),
benchmark:
(
RESULTS

View File

@@ -30,8 +30,8 @@ rule solve_sector_network:
+ "logs/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}_python.log",
threads: solver_threads
resources:
mem_mb=config_provider("solving", "mem"),
walltime=config_provider("solving", "walltime", default="12:00:00"),
mem_mb=config_provider("solving", "mem_mb"),
runtime=config_provider("solving", "runtime", default="6h"),
benchmark:
(
RESULTS

View File

@@ -354,7 +354,16 @@ def mock_snakemake(
import snakemake as sm
from pypsa.descriptors import Dict
from snakemake.api import Workflow
from snakemake.common import SNAKEFILE_CHOICES
from snakemake.script import Snakemake
from snakemake.settings import (
ConfigSettings,
DAGSettings,
ResourceSettings,
StorageSettings,
WorkflowSettings,
)
script_dir = Path(__file__).parent.resolve()
if root_dir is None:
@@ -374,7 +383,7 @@ def mock_snakemake(
f" {root_dir} or scripts directory {script_dir}"
)
try:
for p in sm.SNAKEFILE_CHOICES:
for p in SNAKEFILE_CHOICES:
if os.path.exists(p):
snakefile = p
break
@@ -383,8 +392,18 @@ def mock_snakemake(
elif isinstance(configfiles, str):
configfiles = [configfiles]
workflow = sm.Workflow(
snakefile, overwrite_configfiles=configfiles, rerun_triggers=[]
resource_settings = ResourceSettings()
config_settings = ConfigSettings(configfiles=configfiles)
workflow_settings = WorkflowSettings()
storage_settings = StorageSettings()
dag_settings = DAGSettings(rerun_triggers=[])
workflow = Workflow(
config_settings,
resource_settings,
workflow_settings,
storage_settings,
dag_settings,
storage_provider_settings=dict(),
)
workflow.include(snakefile)
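mock_snakemake builds a stand-in snakemake object so that workflow scripts can be run and debugged outside of Snakemake; with Snakemake 8 it has to assemble the Workflow from the settings dataclasses imported above instead of keyword arguments. A usage sketch in the style of the repository's scripts; the rule name and wildcard values are illustrative:

# Sketch only: typical use of mock_snakemake at the bottom of a workflow script.
if __name__ == "__main__":
    if "snakemake" not in globals():
        from _helpers import mock_snakemake

        # rule name and wildcards are placeholders for illustration
        snakemake = mock_snakemake("build_biomass_potentials", clusters="37")

    # from here on, snakemake.input, snakemake.output and snakemake.config
    # look the same as inside a real Snakemake job
    print(snakemake.input)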

View File

@@ -25,7 +25,7 @@ def read_scigrid_gas(fn):
def build_gem_lng_data(fn):
df = pd.read_excel(fn[0], sheet_name="LNG terminals - data")
df = pd.read_excel(fn, sheet_name="LNG terminals - data")
df = df.set_index("ComboID")
remove_country = ["Cyprus", "Turkey"] # noqa: F841
@@ -46,7 +46,7 @@ def build_gem_lng_data(fn):
def build_gem_prod_data(fn):
df = pd.read_excel(fn[0], sheet_name="Gas extraction - main")
df = pd.read_excel(fn, sheet_name="Gas extraction - main")
df = df.set_index("GEM Unit ID")
remove_country = ["Cyprus", "Türkiye"] # noqa: F841
@@ -60,7 +60,7 @@ def build_gem_prod_data(fn):
& ~Longitude.isna()"
).copy()
p = pd.read_excel(fn[0], sheet_name="Gas extraction - production")
p = pd.read_excel(fn, sheet_name="Gas extraction - production")
p = p.set_index("GEM Unit ID")
p = p[p["Fuel description"] == "gas"]