commit 179ab43ef9
@@ -11,7 +11,7 @@ dependencies:
 - pip

 - atlite>=0.2.9
-- pypsa
+- pypsa>=0.26.0
 - linopy
 - dask
@@ -27,9 +27,9 @@ dependencies:
 - lxml
 - powerplantmatching>=0.5.5
 - numpy
-- pandas>=1.4
+- pandas>=2.1
 - geopandas>=0.11.0
-- xarray
+- xarray>=2023.11.0
 - rioxarray
 - netcdf4
 - networkx
@@ -46,6 +46,7 @@ dependencies:
 - tabula-py
 - pyxlsb
 - graphviz
+- ipopt

 # Keep in conda environment when calling ipython
 - ipython
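Note: these pins line up with the script changes further down in this commit: pandas>=2.1 is what forces the groupby(axis=1) rewrite, and pypsa>=0.26.0 is the reason for dropping the n_mod column. A quick sanity check of an installed environment against the new floors might look as follows; this helper is illustrative only and not part of the commit:

# Illustrative only, not part of the commit: compare installed versions
# against the bumped minimums from environment.yaml.
from importlib.metadata import version
from packaging.version import Version

pins = {"pypsa": "0.26.0", "pandas": "2.1", "xarray": "2023.11.0"}
for pkg, minimum in pins.items():
    installed = Version(version(pkg))
    status = "OK" if installed >= Version(minimum) else "too old"
    print(f"{pkg}: {installed} (minimum {minimum}) -> {status}")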
@@ -227,6 +227,7 @@ if config["enable"]["retrieve"]:
-        run: move(input[0], output[0])
+        run:
+            move(input[0], output[0])


 if config["enable"]["retrieve"]:

     # Downloading Copernicus Global Land Cover for land cover and land use:
@@ -238,26 +239,29 @@ if config["enable"]["retrieve"]:
                 static=True,
             ),
         output:
-            RESOURCES + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
-        run: move(input[0], output[0])
+            RESOURCES
+            + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
+        run:
+            move(input[0], output[0])


 if config["enable"]["retrieve"]:

-    current_month = datetime.now().strftime('%b')
-    current_year = datetime.now().strftime('%Y')
+    current_month = datetime.now().strftime("%b")
+    current_year = datetime.now().strftime("%Y")
     bYYYY = f"{current_month}{current_year}"

     def check_file_exists(url):
         response = requests.head(url)
         return response.status_code == 200

-    url = f'https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip'
+    url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"

     if not check_file_exists(url):
-        prev_month = (datetime.now()-timedelta(30)).strftime('%b')
+        prev_month = (datetime.now() - timedelta(30)).strftime("%b")
         bYYYY = f"{prev_month}{current_year}"
-        assert check_file_exists(f'https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip'), "The file does not exist."
+        assert check_file_exists(
+            f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
+        ), "The file does not exist."

     # Downloading protected area database from WDPA
     # extract the main zip and then merge the contained 3 zipped shapefiles
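Aside from the quoting, which is plain black formatting, the logic here is worth a gloss: WDPA publishes snapshots under a MonYYYY tag, and the code probes the current month's URL with an HTTP HEAD request, falling back one month if the file is not up yet. A standalone sketch of that probe (the URL pattern and requests.head check come from the diff above; everything else is illustrative, and note that the year is not decremented when January falls back to December):

# Standalone, illustrative version of the month-tag fallback shown above.
from datetime import datetime, timedelta

import requests


def check_file_exists(url):
    # HEAD tests existence without downloading the large archive.
    return requests.head(url).status_code == 200


current_year = datetime.now().strftime("%Y")
bYYYY = f"{datetime.now().strftime('%b')}{current_year}"
url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"

if not check_file_exists(url):
    # Fall back roughly one month; as in the original, the year is kept.
    prev_month = (datetime.now() - timedelta(30)).strftime("%b")
    bYYYY = f"{prev_month}{current_year}"
print(bYYYY)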
@@ -268,7 +272,7 @@ if config["enable"]["retrieve"]:
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip",
                 static=True,
                 keep_local=True,
-            )
+            ),
         params:
             zip=RESOURCES + f"WDPA_{bYYYY}_shp.zip",
             folder=directory(RESOURCES + f"WDPA_{bYYYY}"),
@@ -279,15 +283,16 @@ if config["enable"]["retrieve"]:
             shell("unzip -o {params.zip} -d {params.folder}")
             for i in range(3):
                 # vsizip is special driver for directly working with zipped shapefiles in ogr2ogr
-                layer_path = f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                layer_path = (
+                    f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                )
                 print(f"Adding layer {i+1} of 3 to combined output file.")
                 shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")


-    rule download_wdpa_marine:
     # Downloading Marine protected area database from WDPA
     # extract the main zip and then merge the contained 3 zipped shapefiles
     # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
+    rule download_wdpa_marine:
         input:
             HTTP.remote(
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",
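On the /vsizip comment retained above: GDAL's virtual file system lets ogr2ogr open shapefiles directly inside a zip archive, so the three WDPA shards are appended into a single GeoPackage without ever being unpacked. A rough Python equivalent using geopandas, not taken from the commit, with placeholder paths ("Oct2023" stands in for the bYYYY tag):

# Sketch only: read each zipped shapefile via /vsizip and merge into a GeoPackage.
import geopandas as gpd
import pandas as pd

layers = [
    gpd.read_file(f"/vsizip/WDPA_Oct2023/WDPA_Oct2023_Public_shp_{i}.zip")
    for i in range(3)  # the WDPA shapefile ships split into three shards
]
merged = pd.concat(layers, ignore_index=True)
merged.to_file("WDPA_Oct2023.gpkg", driver="GPKG")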
@@ -309,6 +314,7 @@ if config["enable"]["retrieve"]:
             shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")


+
 if config["enable"]["retrieve"]:

     rule retrieve_monthly_co2_prices:
@@ -560,7 +560,7 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
         ~buses["under_construction"]
     )

-    c_nan_b = buses.country.isnull()
+    c_nan_b = buses.country == "na"
    if c_nan_b.sum() > 0:
         c_tag = _get_country(buses.loc[c_nan_b])
         c_tag.loc[~c_tag.isin(countries)] = np.nan
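Unlike most of this commit, this hunk changes behaviour rather than formatting: buses without a country are now identified by the literal placeholder string "na" instead of by NaN, so genuinely missing values no longer match. A tiny illustration with made-up data:

# Made-up frame showing which rows each predicate selects.
import numpy as np
import pandas as pd

buses = pd.DataFrame({"country": ["DE", "na", np.nan]})

print(buses.country.isnull())  # True only for the real NaN in the last row
print(buses.country == "na")   # True only for the "na" placeholder in row 1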
@@ -263,7 +263,7 @@ if __name__ == "__main__":
     df.to_csv(snakemake.output.biomass_potentials_all)

     grouper = {v: k for k, vv in params["classes"].items() for v in vv}
-    df = df.groupby(grouper, axis=1).sum()
+    df = df.T.groupby(grouper).sum().T

     df *= 1e6  # TWh/a to MWh/a
     df.index.name = "MWh/a"
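This rewrite follows from the pandas>=2.1 bump in environment.yaml: DataFrame.groupby(..., axis=1) is deprecated as of pandas 2.1, and the documented replacement is to transpose, group on the index, and transpose back. A self-contained check with invented numbers:

# Invented data: two sub-categories collapsed into one class, column-wise.
import pandas as pd

df = pd.DataFrame({"manureslurry": [1.0, 2.0], "sewage": [3.0, 4.0]})
grouper = {"manureslurry": "biogas", "sewage": "biogas"}

# Deprecated since pandas 2.1: df.groupby(grouper, axis=1).sum()
out = df.T.groupby(grouper).sum().T
print(out)  # a single "biogas" column holding the row sums 4.0 and 6.0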
@@ -470,7 +470,7 @@ if __name__ == "__main__":
     n = pypsa.Network(snakemake.input.network)

     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")

     exclude_carriers = params.cluster_network["exclude_carriers"]
     aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
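Only the quote style changes here; the errors="ignore" flag itself is what makes the drop safe on networks that never carried an n_mod column, since DataFrame.drop then skips missing labels instead of raising. A minimal illustration with an invented frame:

# Invented frame without "n_mod": the drop is a silent no-op.
import pandas as pd

gens = pd.DataFrame({"p_nom": [10.0]})
gens.drop("n_mod", axis=1, inplace=True, errors="ignore")  # no KeyError
# With the default errors="raise" the same call would raise KeyError.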
@@ -537,7 +537,7 @@ if __name__ == "__main__":
     Nyears = n.snapshot_weightings.objective.sum() / 8760

     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")

     n, trafo_map = simplify_network_to_380(n)