Merge branch 'master' into methanol

Fabian Neumann 2022-12-28 12:22:01 +01:00
commit 38fd51fca9
4 changed files with 61 additions and 73 deletions

Snakefile

@@ -162,34 +162,26 @@ else:
 
 rule build_heat_demands:
     input:
-        pop_layout_total="resources/pop_layout_total.nc",
-        pop_layout_urban="resources/pop_layout_urban.nc",
-        pop_layout_rural="resources/pop_layout_rural.nc",
+        pop_layout="resources/pop_layout_{scope}.nc",
         regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson")
     output:
-        heat_demand_urban="resources/heat_demand_urban_elec_s{simpl}_{clusters}.nc",
-        heat_demand_rural="resources/heat_demand_rural_elec_s{simpl}_{clusters}.nc",
-        heat_demand_total="resources/heat_demand_total_elec_s{simpl}_{clusters}.nc"
+        heat_demand="resources/heat_demand_{scope}_elec_s{simpl}_{clusters}.nc"
     resources: mem_mb=20000
-    benchmark: "benchmarks/build_heat_demands/s{simpl}_{clusters}"
+    threads: 8
+    benchmark: "benchmarks/build_heat_demands/{scope}_s{simpl}_{clusters}"
     script: "scripts/build_heat_demand.py"
 
 
 rule build_temperature_profiles:
     input:
-        pop_layout_total="resources/pop_layout_total.nc",
-        pop_layout_urban="resources/pop_layout_urban.nc",
-        pop_layout_rural="resources/pop_layout_rural.nc",
+        pop_layout="resources/pop_layout_{scope}.nc",
         regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson")
     output:
-        temp_soil_total="resources/temp_soil_total_elec_s{simpl}_{clusters}.nc",
-        temp_soil_rural="resources/temp_soil_rural_elec_s{simpl}_{clusters}.nc",
-        temp_soil_urban="resources/temp_soil_urban_elec_s{simpl}_{clusters}.nc",
-        temp_air_total="resources/temp_air_total_elec_s{simpl}_{clusters}.nc",
-        temp_air_rural="resources/temp_air_rural_elec_s{simpl}_{clusters}.nc",
-        temp_air_urban="resources/temp_air_urban_elec_s{simpl}_{clusters}.nc"
+        temp_soil="resources/temp_soil_{scope}_elec_s{simpl}_{clusters}.nc",
+        temp_air="resources/temp_air_{scope}_elec_s{simpl}_{clusters}.nc"
     resources: mem_mb=20000
-    benchmark: "benchmarks/build_temperature_profiles/s{simpl}_{clusters}"
+    threads: 8
+    benchmark: "benchmarks/build_temperature_profiles/{scope}_s{simpl}_{clusters}"
     script: "scripts/build_temperature_profiles.py"
 
 
@@ -215,16 +207,13 @@ rule build_cop_profiles:
 
 rule build_solar_thermal_profiles:
     input:
-        pop_layout_total="resources/pop_layout_total.nc",
-        pop_layout_urban="resources/pop_layout_urban.nc",
-        pop_layout_rural="resources/pop_layout_rural.nc",
+        pop_layout="resources/pop_layout_{scope}.nc",
         regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson")
     output:
-        solar_thermal_total="resources/solar_thermal_total_elec_s{simpl}_{clusters}.nc",
-        solar_thermal_urban="resources/solar_thermal_urban_elec_s{simpl}_{clusters}.nc",
-        solar_thermal_rural="resources/solar_thermal_rural_elec_s{simpl}_{clusters}.nc"
+        solar_thermal="resources/solar_thermal_{scope}_elec_s{simpl}_{clusters}.nc",
     resources: mem_mb=20000
-    benchmark: "benchmarks/build_solar_thermal_profiles/s{simpl}_{clusters}"
+    threads: 16
+    benchmark: "benchmarks/build_solar_thermal_profiles/{scope}_s{simpl}_{clusters}"
     script: "scripts/build_solar_thermal_profiles.py"
 
 

scripts/build_heat_demand.py

@@ -5,6 +5,7 @@ import atlite
 import pandas as pd
 import xarray as xr
 import numpy as np
+from dask.distributed import Client, LocalCluster
 
 if __name__ == '__main__':
     if 'snakemake' not in globals():
@@ -15,14 +16,9 @@ if __name__ == '__main__':
             clusters=48,
         )
 
-    if 'snakemake' not in globals():
-        from vresutils import Dict
-        import yaml
-        snakemake = Dict()
-        with open('config.yaml') as f:
-            snakemake.config = yaml.safe_load(f)
-        snakemake.input = Dict()
-        snakemake.output = Dict()
+    nprocesses = int(snakemake.threads)
+    cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
+    client = Client(cluster, asynchronous=True)
 
     time = pd.date_range(freq='h', **snakemake.config['snapshots'])
     cutout_config = snakemake.config['atlite']['cutout']
@@ -33,14 +29,14 @@ if __name__ == '__main__':
 
     I = cutout.indicatormatrix(clustered_regions)
 
-    for area in ["rural", "urban", "total"]:
-        pop_layout = xr.open_dataarray(snakemake.input[f'pop_layout_{area}'])
-
-        stacked_pop = pop_layout.stack(spatial=('y', 'x'))
-        M = I.T.dot(np.diag(I.dot(stacked_pop)))
-
-        heat_demand = cutout.heat_demand(
-            matrix=M.T, index=clustered_regions.index)
-
-        heat_demand.to_netcdf(snakemake.output[f"heat_demand_{area}"])
+    pop_layout = xr.open_dataarray(snakemake.input.pop_layout)
+    stacked_pop = pop_layout.stack(spatial=('y', 'x'))
+    M = I.T.dot(np.diag(I.dot(stacked_pop)))
+
+    heat_demand = cutout.heat_demand(
+        matrix=M.T, index=clustered_regions.index,
+        dask_kwargs=dict(scheduler=client),
+        show_progress=False)
+
+    heat_demand.to_netcdf(snakemake.output.heat_demand)
 
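
Note on the dask setup added above: each script now spins up a LocalCluster sized from snakemake.threads and hands its Client to the atlite conversion as the scheduler. A minimal, self-contained sketch of the same pattern on a toy dask array; the worker count and array sizes are arbitrary assumptions for illustration:

    # Sketch only: LocalCluster with one thread per worker, client used as scheduler.
    import dask.array as da
    from dask.distributed import Client, LocalCluster

    if __name__ == "__main__":
        nprocesses = 4  # in the scripts this is int(snakemake.threads)
        cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
        client = Client(cluster)

        x = da.random.random((10_000, 1_000), chunks=(1_000, 1_000))
        result = x.mean(axis=0).compute(scheduler=client)  # work runs on the local cluster

        client.close()
        cluster.close()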

scripts/build_solar_thermal_profiles.py

@@ -5,6 +5,7 @@ import atlite
 import pandas as pd
 import xarray as xr
 import numpy as np
+from dask.distributed import Client, LocalCluster
 
 if __name__ == '__main__':
     if 'snakemake' not in globals():
@@ -15,14 +16,9 @@ if __name__ == '__main__':
             clusters=48,
         )
 
-    if 'snakemake' not in globals():
-        from vresutils import Dict
-        import yaml
-        snakemake = Dict()
-        with open('config.yaml') as f:
-            snakemake.config = yaml.safe_load(f)
-        snakemake.input = Dict()
-        snakemake.output = Dict()
+    nprocesses = int(snakemake.threads)
+    cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
+    client = Client(cluster, asynchronous=True)
 
     config = snakemake.config['solar_thermal']
 
@@ -35,18 +31,18 @@ if __name__ == '__main__':
 
     I = cutout.indicatormatrix(clustered_regions)
 
-    for area in ["total", "rural", "urban"]:
-        pop_layout = xr.open_dataarray(snakemake.input[f'pop_layout_{area}'])
-
-        stacked_pop = pop_layout.stack(spatial=('y', 'x'))
-        M = I.T.dot(np.diag(I.dot(stacked_pop)))
-
-        nonzero_sum = M.sum(axis=0, keepdims=True)
-        nonzero_sum[nonzero_sum == 0.] = 1.
-        M_tilde = M / nonzero_sum
-
-        solar_thermal = cutout.solar_thermal(**config, matrix=M_tilde.T,
-                                             index=clustered_regions.index)
-
-        solar_thermal.to_netcdf(snakemake.output[f"solar_thermal_{area}"])
+    pop_layout = xr.open_dataarray(snakemake.input.pop_layout)
+    stacked_pop = pop_layout.stack(spatial=('y', 'x'))
+    M = I.T.dot(np.diag(I.dot(stacked_pop)))
+
+    nonzero_sum = M.sum(axis=0, keepdims=True)
+    nonzero_sum[nonzero_sum == 0.] = 1.
+    M_tilde = M / nonzero_sum
+
+    solar_thermal = cutout.solar_thermal(**config, matrix=M_tilde.T,
+                                         index=clustered_regions.index,
+                                         dask_kwargs=dict(scheduler=client),
+                                         show_progress=False)
+
+    solar_thermal.to_netcdf(snakemake.output.solar_thermal)
 
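
Note on the unchanged weighting step kept above: M combines the cell-to-region indicator matrix with the per-region aggregated population, and dividing by the column sums (with zero columns guarded) yields weights that sum to one per region, so the atlite conversion returns an average over each region's cells rather than a sum. A toy numeric sketch with assumed shapes (2 regions, 4 cells; the numbers are made up for illustration):

    import numpy as np

    I = np.array([[1., 1., 0., 0.],     # region 0 covers cells 0 and 1
                  [0., 0., 1., 1.]])    # region 1 covers cells 2 and 3
    pop = np.array([10., 30., 0., 5.])  # population per cell

    M = I.T.dot(np.diag(I.dot(pop)))        # shape (cells, regions)
    nonzero_sum = M.sum(axis=0, keepdims=True)
    nonzero_sum[nonzero_sum == 0.] = 1.     # guard empty regions against division by zero
    M_tilde = M / nonzero_sum               # each column now sums to one

    field = np.array([1., 2., 3., 4.])      # some per-cell quantity
    print(M_tilde.T.dot(field))             # per-region averages: [1.5 3.5]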

scripts/build_temperature_profiles.py

@@ -5,6 +5,7 @@ import atlite
 import pandas as pd
 import xarray as xr
 import numpy as np
+from dask.distributed import Client, LocalCluster
 
 if __name__ == '__main__':
     if 'snakemake' not in globals():
@@ -15,6 +16,10 @@ if __name__ == '__main__':
             clusters=48,
         )
 
+    nprocesses = int(snakemake.threads)
+    cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
+    client = Client(cluster, asynchronous=True)
+
     time = pd.date_range(freq='h', **snakemake.config['snapshots'])
     cutout_config = snakemake.config['atlite']['cutout']
     cutout = atlite.Cutout(cutout_config).sel(time=time)
@@ -24,23 +29,25 @@ if __name__ == '__main__':
 
     I = cutout.indicatormatrix(clustered_regions)
 
-    for area in ["total", "rural", "urban"]:
-        pop_layout = xr.open_dataarray(snakemake.input[f'pop_layout_{area}'])
-
-        stacked_pop = pop_layout.stack(spatial=('y', 'x'))
-        M = I.T.dot(np.diag(I.dot(stacked_pop)))
-
-        nonzero_sum = M.sum(axis=0, keepdims=True)
-        nonzero_sum[nonzero_sum == 0.] = 1.
-        M_tilde = M / nonzero_sum
-
-        temp_air = cutout.temperature(
-            matrix=M_tilde.T, index=clustered_regions.index)
-
-        temp_air.to_netcdf(snakemake.output[f"temp_air_{area}"])
-
-        temp_soil = cutout.soil_temperature(
-            matrix=M_tilde.T, index=clustered_regions.index)
-
-        temp_soil.to_netcdf(snakemake.output[f"temp_soil_{area}"])
+    pop_layout = xr.open_dataarray(snakemake.input.pop_layout)
+    stacked_pop = pop_layout.stack(spatial=('y', 'x'))
+    M = I.T.dot(np.diag(I.dot(stacked_pop)))
+
+    nonzero_sum = M.sum(axis=0, keepdims=True)
+    nonzero_sum[nonzero_sum == 0.] = 1.
+    M_tilde = M / nonzero_sum
+
+    temp_air = cutout.temperature(
+        matrix=M_tilde.T, index=clustered_regions.index,
+        dask_kwargs=dict(scheduler=client),
+        show_progress=False)
+
+    temp_air.to_netcdf(snakemake.output.temp_air)
+
+    temp_soil = cutout.soil_temperature(
+        matrix=M_tilde.T, index=clustered_regions.index,
+        dask_kwargs=dict(scheduler=client),
+        show_progress=False)
+
+    temp_soil.to_netcdf(snakemake.output.temp_soil)
 
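
Note: the per-scope netCDF files written by these scripts can be inspected directly with xarray. A hypothetical downstream check, not part of the commit; the file name assumes scope="total", simpl="" and clusters=48, and the time dimension name follows atlite's usual layout:

    import xarray as xr

    temp_air = xr.open_dataarray("resources/temp_air_total_elec_s_48.nc")
    daily = temp_air.resample(time="D").mean()  # hourly -> daily means per region
    print(daily.sizes)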