commit 179ab43ef9
@@ -11,7 +11,7 @@ dependencies:
 - pip
 
 - atlite>=0.2.9
-- pypsa
+- pypsa>=0.26.0
 - linopy
 - dask
 
@@ -27,9 +27,9 @@ dependencies:
 - lxml
 - powerplantmatching>=0.5.5
 - numpy
-- pandas>=1.4
+- pandas>=2.1
 - geopandas>=0.11.0
-- xarray
+- xarray>=2023.11.0
 - rioxarray
 - netcdf4
 - networkx
@@ -46,6 +46,7 @@ dependencies:
 - tabula-py
 - pyxlsb
 - graphviz
+- ipopt
 
 # Keep in conda environment when calling ipython
 - ipython
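Aside on the dependency bumps above: pypsa>=0.26.0, pandas>=2.1 and xarray>=2023.11.0 raise the minimum versions, and ipopt is newly listed. A hypothetical helper (not part of this commit) to check an already-installed environment against those floors could look like this, assuming the packaging library is available:

# Hypothetical helper, not part of the diff: compare installed versions against the new floors.
from importlib.metadata import PackageNotFoundError, version

from packaging.version import Version

pins = {"pypsa": "0.26.0", "pandas": "2.1", "xarray": "2023.11.0"}

for pkg, floor in pins.items():
    try:
        installed = Version(version(pkg))
    except PackageNotFoundError:
        print(f"{pkg}: not installed")
        continue
    status = "ok" if installed >= Version(floor) else f"needs >= {floor}"
    print(f"{pkg} {installed}: {status}")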
@@ -227,6 +227,7 @@ if config["enable"]["retrieve"]:
         run:
             move(input[0], output[0])
 
+
 if config["enable"]["retrieve"]:
 
     # Downloading Copernicus Global Land Cover for land cover and land use:
@@ -238,26 +239,29 @@ if config["enable"]["retrieve"]:
                 static=True,
             ),
         output:
-            RESOURCES + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
-        run: move(input[0], output[0])
+            RESOURCES
+            + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
+        run:
+            move(input[0], output[0])
 
 
 if config["enable"]["retrieve"]:
-
-    current_month = datetime.now().strftime('%b')
-    current_year = datetime.now().strftime('%Y')
+    current_month = datetime.now().strftime("%b")
+    current_year = datetime.now().strftime("%Y")
     bYYYY = f"{current_month}{current_year}"
 
     def check_file_exists(url):
         response = requests.head(url)
         return response.status_code == 200
 
-    url = f'https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip'
+    url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
 
     if not check_file_exists(url):
-        prev_month = (datetime.now()-timedelta(30)).strftime('%b')
+        prev_month = (datetime.now() - timedelta(30)).strftime("%b")
         bYYYY = f"{prev_month}{current_year}"
-        assert check_file_exists(f'https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip'), "The file does not exist."
+        assert check_file_exists(
+            f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
+        ), "The file does not exist."
 
     # Downloading protected area database from WDPA
     # extract the main zip and then merge the contained 3 zipped shapefiles
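For reference, the probe-and-fallback logic kept by the hunk above works as follows: build the monthly WDPA URL, test it with an HTTP HEAD request, and retry with the previous month if the current snapshot is not yet published. A standalone sketch of the same flow, outside Snakemake and for illustration only:

# Standalone sketch of the WDPA URL probe used in retrieve.smk (illustrative only).
from datetime import datetime, timedelta

import requests


def check_file_exists(url):
    # HEAD avoids downloading the archive just to test availability.
    response = requests.head(url)
    return response.status_code == 200


current_month = datetime.now().strftime("%b")
current_year = datetime.now().strftime("%Y")
bYYYY = f"{current_month}{current_year}"

url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"

if not check_file_exists(url):
    # The monthly release may not be out yet; try last month's snapshot instead.
    prev_month = (datetime.now() - timedelta(30)).strftime("%b")
    bYYYY = f"{prev_month}{current_year}"
    url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
    assert check_file_exists(url), "The file does not exist."

print(f"Using WDPA snapshot: {url}")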
@@ -268,7 +272,7 @@ if config["enable"]["retrieve"]:
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip",
                 static=True,
                 keep_local=True,
-            )
+            ),
         params:
             zip=RESOURCES + f"WDPA_{bYYYY}_shp.zip",
             folder=directory(RESOURCES + f"WDPA_{bYYYY}"),
@@ -279,15 +283,16 @@ if config["enable"]["retrieve"]:
             shell("unzip -o {params.zip} -d {params.folder}")
             for i in range(3):
                 # vsizip is special driver for directly working with zipped shapefiles in ogr2ogr
-                layer_path = f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                layer_path = (
+                    f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                )
                 print(f"Adding layer {i+1} of 3 to combined output file.")
                 shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")
 
-
-    # Downloading Marine protected area database from WDPA
-    # extract the main zip and then merge the contained 3 zipped shapefiles
-    # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
     rule download_wdpa_marine:
+        # Downloading Marine protected area database from WDPA
+        # extract the main zip and then merge the contained 3 zipped shapefiles
+        # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
         input:
             HTTP.remote(
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",
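Context for the /vsizip paths used in both WDPA rules: GDAL's /vsizip/ virtual filesystem lets ogr2ogr read a shapefile directly out of a zip archive, and the -update -append flags merge successive layers into one GeoPackage. A sketch of the same merge as a plain Python loop; the folder, tag and output path below are placeholders, not values from the repository:

# Illustrative merge of zipped WDPA shapefiles into a single GeoPackage via GDAL's /vsizip.
import subprocess

folder = "resources/WDPA_example"   # hypothetical extraction folder
bYYYY = "Jan2024"                   # hypothetical snapshot tag
output_gpkg = "resources/WDPA_example.gpkg"

for i in range(3):
    layer_path = f"/vsizip/{folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
    print(f"Adding layer {i + 1} of 3 to combined output file.")
    # -update -append adds each layer to the existing GeoPackage instead of overwriting it.
    subprocess.run(
        ["ogr2ogr", "-f", "gpkg", "-update", "-append", output_gpkg, layer_path],
        check=True,
    )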
@@ -309,6 +314,7 @@ if config["enable"]["retrieve"]:
             shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")
 
 
+
 if config["enable"]["retrieve"]:
 
     rule retrieve_monthly_co2_prices:
@@ -560,7 +560,7 @@ def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
         ~buses["under_construction"]
     )
 
-    c_nan_b = buses.country.isnull()
+    c_nan_b = buses.country == "na"
     if c_nan_b.sum() > 0:
         c_tag = _get_country(buses.loc[c_nan_b])
         c_tag.loc[~c_tag.isin(countries)] = np.nan
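The rewritten selection above targets buses whose country is the literal placeholder string "na" rather than a true missing value. A small pandas illustration of the difference, using made-up data:

# Illustration only: "na" as a string placeholder is not caught by isnull().
import numpy as np
import pandas as pd

buses = pd.DataFrame({"country": ["DE", "na", np.nan, "FR"]})

print(buses.country.isnull())   # True only for the real NaN entry
print(buses.country == "na")    # True only for the "na" placeholder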
@@ -263,7 +263,7 @@ if __name__ == "__main__":
     df.to_csv(snakemake.output.biomass_potentials_all)
 
     grouper = {v: k for k, vv in params["classes"].items() for v in vv}
-    df = df.groupby(grouper, axis=1).sum()
+    df = df.T.groupby(grouper).sum().T
 
     df *= 1e6  # TWh/a to MWh/a
     df.index.name = "MWh/a"
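Background for the change above: DataFrame.groupby(axis=1) is deprecated since pandas 2.1, so the column-wise grouping is rewritten as transpose, group on the index, sum, transpose back. A small equivalence demonstration with made-up data:

# Illustration: grouping columns via transpose replaces the deprecated groupby(axis=1).
import pandas as pd

df = pd.DataFrame(
    {"wheat": [1.0, 2.0], "barley": [3.0, 4.0], "manure": [5.0, 6.0]},
    index=["DE", "FR"],
)
grouper = {"wheat": "crops", "barley": "crops", "manure": "residues"}

grouped = df.T.groupby(grouper).sum().T
print(grouped)
# DE: crops 4.0, residues 5.0; FR: crops 6.0, residues 6.0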
@@ -470,7 +470,7 @@ if __name__ == "__main__":
     n = pypsa.Network(snakemake.input.network)
 
     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
 
     exclude_carriers = params.cluster_network["exclude_carriers"]
     aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
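Note on errors="ignore" in the drop calls above and below: it turns the drop into a no-op when the n_mod column is absent instead of raising a KeyError. A tiny illustration with a throwaway frame:

# Illustration: errors="ignore" makes drop tolerant of a missing column.
import pandas as pd

generators = pd.DataFrame({"p_nom": [10.0, 20.0]})  # no "n_mod" column here

generators.drop("n_mod", axis=1, inplace=True, errors="ignore")  # silently does nothing
# generators.drop("n_mod", axis=1, inplace=True)  # would raise KeyError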
|
@ -537,7 +537,7 @@ if __name__ == "__main__":
|
|||||||
Nyears = n.snapshot_weightings.objective.sum() / 8760
|
Nyears = n.snapshot_weightings.objective.sum() / 8760
|
||||||
|
|
||||||
# remove integer outputs for compatibility with PyPSA v0.26.0
|
# remove integer outputs for compatibility with PyPSA v0.26.0
|
||||||
n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
|
n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
|
||||||
|
|
||||||
n, trafo_map = simplify_network_to_380(n)
|
n, trafo_map = simplify_network_to_380(n)
|
||||||
|
|
||||||