Merge branch 'master' into retro-updated

Tom Brown, 2021-04-30 18:12:20 +02:00, committed by GitHub
commit 8bfb1490c1
18 changed files with 206 additions and 214 deletions

Snakefile

@@ -51,9 +51,9 @@ rule build_clustered_population_layouts:
 pop_layout_total="resources/pop_layout_total.nc",
 pop_layout_urban="resources/pop_layout_urban.nc",
 pop_layout_rural="resources/pop_layout_rural.nc",
-regions_onshore=pypsaeur('resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson')
+regions_onshore=pypsaeur('resources/regions_onshore_elec_s{simpl}_{clusters}.geojson')
 output:
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv"
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv"
 resources: mem_mb=10000
 script: "scripts/build_clustered_population_layouts.py"
@@ -63,9 +63,9 @@ rule build_simplified_population_layouts:
 pop_layout_total="resources/pop_layout_total.nc",
 pop_layout_urban="resources/pop_layout_urban.nc",
 pop_layout_rural="resources/pop_layout_rural.nc",
-regions_onshore=pypsaeur('resources/regions_onshore_{network}_s{simpl}.geojson')
+regions_onshore=pypsaeur('resources/regions_onshore_elec_s{simpl}.geojson')
 output:
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}.csv"
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}.csv"
 resources: mem_mb=10000
 script: "scripts/build_clustered_population_layouts.py"
@@ -75,11 +75,11 @@ rule build_heat_demands:
 pop_layout_total="resources/pop_layout_total.nc",
 pop_layout_urban="resources/pop_layout_urban.nc",
 pop_layout_rural="resources/pop_layout_rural.nc",
-regions_onshore=pypsaeur("resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson")
+regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson")
 output:
-heat_demand_urban="resources/heat_demand_urban_{network}_s{simpl}_{clusters}.nc",
-heat_demand_rural="resources/heat_demand_rural_{network}_s{simpl}_{clusters}.nc",
-heat_demand_total="resources/heat_demand_total_{network}_s{simpl}_{clusters}.nc"
+heat_demand_urban="resources/heat_demand_urban_elec_s{simpl}_{clusters}.nc",
+heat_demand_rural="resources/heat_demand_rural_elec_s{simpl}_{clusters}.nc",
+heat_demand_total="resources/heat_demand_total_elec_s{simpl}_{clusters}.nc"
 resources: mem_mb=20000
 script: "scripts/build_heat_demand.py"
@@ -88,33 +88,33 @@ rule build_temperature_profiles:
 pop_layout_total="resources/pop_layout_total.nc",
 pop_layout_urban="resources/pop_layout_urban.nc",
 pop_layout_rural="resources/pop_layout_rural.nc",
-regions_onshore=pypsaeur("resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson")
+regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson")
 output:
-temp_soil_total="resources/temp_soil_total_{network}_s{simpl}_{clusters}.nc",
-temp_soil_rural="resources/temp_soil_rural_{network}_s{simpl}_{clusters}.nc",
-temp_soil_urban="resources/temp_soil_urban_{network}_s{simpl}_{clusters}.nc",
-temp_air_total="resources/temp_air_total_{network}_s{simpl}_{clusters}.nc",
-temp_air_rural="resources/temp_air_rural_{network}_s{simpl}_{clusters}.nc",
-temp_air_urban="resources/temp_air_urban_{network}_s{simpl}_{clusters}.nc"
+temp_soil_total="resources/temp_soil_total_elec_s{simpl}_{clusters}.nc",
+temp_soil_rural="resources/temp_soil_rural_elec_s{simpl}_{clusters}.nc",
+temp_soil_urban="resources/temp_soil_urban_elec_s{simpl}_{clusters}.nc",
+temp_air_total="resources/temp_air_total_elec_s{simpl}_{clusters}.nc",
+temp_air_rural="resources/temp_air_rural_elec_s{simpl}_{clusters}.nc",
+temp_air_urban="resources/temp_air_urban_elec_s{simpl}_{clusters}.nc"
 resources: mem_mb=20000
 script: "scripts/build_temperature_profiles.py"

 rule build_cop_profiles:
 input:
-temp_soil_total="resources/temp_soil_total_{network}_s{simpl}_{clusters}.nc",
-temp_soil_rural="resources/temp_soil_rural_{network}_s{simpl}_{clusters}.nc",
-temp_soil_urban="resources/temp_soil_urban_{network}_s{simpl}_{clusters}.nc",
-temp_air_total="resources/temp_air_total_{network}_s{simpl}_{clusters}.nc",
-temp_air_rural="resources/temp_air_rural_{network}_s{simpl}_{clusters}.nc",
-temp_air_urban="resources/temp_air_urban_{network}_s{simpl}_{clusters}.nc"
+temp_soil_total="resources/temp_soil_total_elec_s{simpl}_{clusters}.nc",
+temp_soil_rural="resources/temp_soil_rural_elec_s{simpl}_{clusters}.nc",
+temp_soil_urban="resources/temp_soil_urban_elec_s{simpl}_{clusters}.nc",
+temp_air_total="resources/temp_air_total_elec_s{simpl}_{clusters}.nc",
+temp_air_rural="resources/temp_air_rural_elec_s{simpl}_{clusters}.nc",
+temp_air_urban="resources/temp_air_urban_elec_s{simpl}_{clusters}.nc"
 output:
-cop_soil_total="resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc",
-cop_soil_rural="resources/cop_soil_rural_{network}_s{simpl}_{clusters}.nc",
-cop_soil_urban="resources/cop_soil_urban_{network}_s{simpl}_{clusters}.nc",
-cop_air_total="resources/cop_air_total_{network}_s{simpl}_{clusters}.nc",
-cop_air_rural="resources/cop_air_rural_{network}_s{simpl}_{clusters}.nc",
-cop_air_urban="resources/cop_air_urban_{network}_s{simpl}_{clusters}.nc"
+cop_soil_total="resources/cop_soil_total_elec_s{simpl}_{clusters}.nc",
+cop_soil_rural="resources/cop_soil_rural_elec_s{simpl}_{clusters}.nc",
+cop_soil_urban="resources/cop_soil_urban_elec_s{simpl}_{clusters}.nc",
+cop_air_total="resources/cop_air_total_elec_s{simpl}_{clusters}.nc",
+cop_air_rural="resources/cop_air_rural_elec_s{simpl}_{clusters}.nc",
+cop_air_urban="resources/cop_air_urban_elec_s{simpl}_{clusters}.nc"
 resources: mem_mb=20000
 script: "scripts/build_cop_profiles.py"
@@ -124,11 +124,11 @@ rule build_solar_thermal_profiles:
 pop_layout_total="resources/pop_layout_total.nc",
 pop_layout_urban="resources/pop_layout_urban.nc",
 pop_layout_rural="resources/pop_layout_rural.nc",
-regions_onshore=pypsaeur("resources/regions_onshore_{network}_s{simpl}_{clusters}.geojson")
+regions_onshore=pypsaeur("resources/regions_onshore_elec_s{simpl}_{clusters}.geojson")
 output:
-solar_thermal_total="resources/solar_thermal_total_{network}_s{simpl}_{clusters}.nc",
-solar_thermal_urban="resources/solar_thermal_urban_{network}_s{simpl}_{clusters}.nc",
-solar_thermal_rural="resources/solar_thermal_rural_{network}_s{simpl}_{clusters}.nc"
+solar_thermal_total="resources/solar_thermal_total_elec_s{simpl}_{clusters}.nc",
+solar_thermal_urban="resources/solar_thermal_urban_elec_s{simpl}_{clusters}.nc",
+solar_thermal_rural="resources/solar_thermal_rural_elec_s{simpl}_{clusters}.nc"
 resources: mem_mb=20000
 script: "scripts/build_solar_thermal_profiles.py"
@@ -199,12 +199,12 @@ rule build_industrial_production_per_country_tomorrow:

 rule build_industrial_distribution_key:
 input:
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv",
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
 europe_shape=pypsaeur('resources/europe_shape.geojson'),
 hotmaps_industrial_database="data/Industrial_Database.csv",
-network=pypsaeur('networks/{network}_s{simpl}_{clusters}.nc')
+network=pypsaeur('networks/elec_s{simpl}_{clusters}.nc')
 output:
-industrial_distribution_key="resources/industrial_distribution_key_{network}_s{simpl}_{clusters}.csv"
+industrial_distribution_key="resources/industrial_distribution_key_elec_s{simpl}_{clusters}.csv"
 threads: 1
 resources: mem_mb=1000
 script: 'scripts/build_industrial_distribution_key.py'
@@ -213,10 +213,10 @@ rule build_industrial_distribution_key:

 rule build_industrial_production_per_node:
 input:
-industrial_distribution_key="resources/industrial_distribution_key_{network}_s{simpl}_{clusters}.csv",
+industrial_distribution_key="resources/industrial_distribution_key_elec_s{simpl}_{clusters}.csv",
 industrial_production_per_country_tomorrow="resources/industrial_production_per_country_tomorrow.csv"
 output:
-industrial_production_per_node="resources/industrial_production_{network}_s{simpl}_{clusters}.csv"
+industrial_production_per_node="resources/industrial_production_elec_s{simpl}_{clusters}.csv"
 threads: 1
 resources: mem_mb=1000
 script: 'scripts/build_industrial_production_per_node.py'
@@ -225,10 +225,10 @@ rule build_industrial_production_per_node:

 rule build_industrial_energy_demand_per_node:
 input:
 industry_sector_ratios="resources/industry_sector_ratios.csv",
-industrial_production_per_node="resources/industrial_production_{network}_s{simpl}_{clusters}.csv",
-industrial_energy_demand_per_node_today="resources/industrial_energy_demand_today_{network}_s{simpl}_{clusters}.csv"
+industrial_production_per_node="resources/industrial_production_elec_s{simpl}_{clusters}.csv",
+industrial_energy_demand_per_node_today="resources/industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv"
 output:
-industrial_energy_demand_per_node="resources/industrial_energy_demand_{network}_s{simpl}_{clusters}.csv"
+industrial_energy_demand_per_node="resources/industrial_energy_demand_elec_s{simpl}_{clusters}.csv"
 threads: 1
 resources: mem_mb=1000
 script: 'scripts/build_industrial_energy_demand_per_node.py'
@@ -247,10 +247,10 @@ rule build_industrial_energy_demand_per_country_today:

 rule build_industrial_energy_demand_per_node_today:
 input:
-industrial_distribution_key="resources/industrial_distribution_key_{network}_s{simpl}_{clusters}.csv",
+industrial_distribution_key="resources/industrial_distribution_key_elec_s{simpl}_{clusters}.csv",
 industrial_energy_demand_per_country_today="resources/industrial_energy_demand_per_country_today.csv"
 output:
-industrial_energy_demand_per_node_today="resources/industrial_energy_demand_today_{network}_s{simpl}_{clusters}.csv"
+industrial_energy_demand_per_node_today="resources/industrial_energy_demand_today_elec_s{simpl}_{clusters}.csv"
 threads: 1
 resources: mem_mb=1000
 script: 'scripts/build_industrial_energy_demand_per_node_today.py'
@@ -270,10 +270,10 @@ rule build_industrial_energy_demand_per_country:

 rule build_industrial_demand:
 input:
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv",
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
 industrial_demand_per_country="resources/industrial_energy_demand_per_country.csv"
 output:
-industrial_demand="resources/industrial_demand_{network}_s{simpl}_{clusters}.csv"
+industrial_demand="resources/industrial_demand_elec_s{simpl}_{clusters}.csv"
 threads: 1
 resources: mem_mb=1000
 script: 'scripts/build_industrial_demand.py'
@@ -287,19 +287,19 @@ rule build_retro_cost:
 tax_w="data/retro/electricity_taxes_eu.csv",
 construction_index="data/retro/comparative_level_investment.csv",
 floor_area_missing="data/retro/floor_area_missing.csv",
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv",
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
 cost_germany="data/retro/retro_cost_germany.csv",
 window_assumptions="data/retro/window_assumptions.csv",
 output:
-retro_cost="resources/retro_cost_{network}_s{simpl}_{clusters}.csv",
-floor_area="resources/floor_area_{network}_s{simpl}_{clusters}.csv"
+retro_cost="resources/retro_cost_elec_s{simpl}_{clusters}.csv",
+floor_area="resources/floor_area_elec_s{simpl}_{clusters}.csv"
 resources: mem_mb=1000
 script: "scripts/build_retro_cost.py"

 rule prepare_sector_network:
 input:
-network=pypsaeur('networks/{network}_s{simpl}_{clusters}_ec_lv{lv}_{opts}.nc'),
+network=pypsaeur('networks/elec_s{simpl}_{clusters}_ec_lv{lv}_{opts}.nc'),
 energy_totals_name='resources/energy_totals.csv',
 co2_totals_name='resources/co2_totals.csv',
 transport_name='resources/transport_data.csv',
@@ -311,35 +311,35 @@ rule prepare_sector_network:
 h2_cavern = "data/hydrogen_salt_cavern_potentials.csv",
 profile_offwind_ac=pypsaeur("resources/profile_offwind-ac.nc"),
 profile_offwind_dc=pypsaeur("resources/profile_offwind-dc.nc"),
-busmap_s=pypsaeur("resources/busmap_{network}_s{simpl}.csv"),
-busmap=pypsaeur("resources/busmap_{network}_s{simpl}_{clusters}.csv"),
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv",
-simplified_pop_layout="resources/pop_layout_{network}_s{simpl}.csv",
-industrial_demand="resources/industrial_energy_demand_{network}_s{simpl}_{clusters}.csv",
-heat_demand_urban="resources/heat_demand_urban_{network}_s{simpl}_{clusters}.nc",
-heat_demand_rural="resources/heat_demand_rural_{network}_s{simpl}_{clusters}.nc",
-heat_demand_total="resources/heat_demand_total_{network}_s{simpl}_{clusters}.nc",
-temp_soil_total="resources/temp_soil_total_{network}_s{simpl}_{clusters}.nc",
-temp_soil_rural="resources/temp_soil_rural_{network}_s{simpl}_{clusters}.nc",
-temp_soil_urban="resources/temp_soil_urban_{network}_s{simpl}_{clusters}.nc",
-temp_air_total="resources/temp_air_total_{network}_s{simpl}_{clusters}.nc",
-temp_air_rural="resources/temp_air_rural_{network}_s{simpl}_{clusters}.nc",
-temp_air_urban="resources/temp_air_urban_{network}_s{simpl}_{clusters}.nc",
-cop_soil_total="resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc",
-cop_soil_rural="resources/cop_soil_rural_{network}_s{simpl}_{clusters}.nc",
-cop_soil_urban="resources/cop_soil_urban_{network}_s{simpl}_{clusters}.nc",
-cop_air_total="resources/cop_air_total_{network}_s{simpl}_{clusters}.nc",
-cop_air_rural="resources/cop_air_rural_{network}_s{simpl}_{clusters}.nc",
-cop_air_urban="resources/cop_air_urban_{network}_s{simpl}_{clusters}.nc",
-solar_thermal_total="resources/solar_thermal_total_{network}_s{simpl}_{clusters}.nc",
-solar_thermal_urban="resources/solar_thermal_urban_{network}_s{simpl}_{clusters}.nc",
-solar_thermal_rural="resources/solar_thermal_rural_{network}_s{simpl}_{clusters}.nc",
-retro_cost_energy = "resources/retro_cost_{network}_s{simpl}_{clusters}.csv",
-floor_area = "resources/floor_area_{network}_s{simpl}_{clusters}.csv"
+busmap_s=pypsaeur("resources/busmap_elec_s{simpl}.csv"),
+busmap=pypsaeur("resources/busmap_elec_s{simpl}_{clusters}.csv"),
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
+simplified_pop_layout="resources/pop_layout_elec_s{simpl}.csv",
+industrial_demand="resources/industrial_energy_demand_elec_s{simpl}_{clusters}.csv",
+heat_demand_urban="resources/heat_demand_urban_elec_s{simpl}_{clusters}.nc",
+heat_demand_rural="resources/heat_demand_rural_elec_s{simpl}_{clusters}.nc",
+heat_demand_total="resources/heat_demand_total_elec_s{simpl}_{clusters}.nc",
+temp_soil_total="resources/temp_soil_total_elec_s{simpl}_{clusters}.nc",
+temp_soil_rural="resources/temp_soil_rural_elec_s{simpl}_{clusters}.nc",
+temp_soil_urban="resources/temp_soil_urban_elec_s{simpl}_{clusters}.nc",
+temp_air_total="resources/temp_air_total_elec_s{simpl}_{clusters}.nc",
+temp_air_rural="resources/temp_air_rural_elec_s{simpl}_{clusters}.nc",
+temp_air_urban="resources/temp_air_urban_elec_s{simpl}_{clusters}.nc",
+cop_soil_total="resources/cop_soil_total_elec_s{simpl}_{clusters}.nc",
+cop_soil_rural="resources/cop_soil_rural_elec_s{simpl}_{clusters}.nc",
+cop_soil_urban="resources/cop_soil_urban_elec_s{simpl}_{clusters}.nc",
+cop_air_total="resources/cop_air_total_elec_s{simpl}_{clusters}.nc",
+cop_air_rural="resources/cop_air_rural_elec_s{simpl}_{clusters}.nc",
+cop_air_urban="resources/cop_air_urban_elec_s{simpl}_{clusters}.nc",
+solar_thermal_total="resources/solar_thermal_total_elec_s{simpl}_{clusters}.nc",
+solar_thermal_urban="resources/solar_thermal_urban_elec_s{simpl}_{clusters}.nc",
+solar_thermal_rural="resources/solar_thermal_rural_elec_s{simpl}_{clusters}.nc",
+retro_cost_energy = "resources/retro_cost_elec_s{simpl}_{clusters}.csv",
+floor_area = "resources/floor_area_elec_s{simpl}_{clusters}.csv"
-output: config['results_dir'] + config['run'] + '/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc'
+output: config['results_dir'] + config['run'] + '/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc'
 threads: 1
 resources: mem_mb=2000
-benchmark: config['results_dir'] + config['run'] + "/benchmarks/prepare_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
+benchmark: config['results_dir'] + config['run'] + "/benchmarks/prepare_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
 script: "scripts/prepare_sector_network.py"
@@ -412,16 +412,16 @@ if config["foresight"] == "overnight":

 rule solve_network:
 input:
-network=config['results_dir'] + config['run'] + "/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+network=config['results_dir'] + config['run'] + "/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
 costs=config['costs_dir'] + "costs_{planning_horizons}.csv",
 config=config['summary_dir'] + '/' + config['run'] + '/configs/config.yaml'
-output: config['results_dir'] + config['run'] + "/postnetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
+output: config['results_dir'] + config['run'] + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
 shadow: "shallow"
 log:
-solver=config['results_dir'] + config['run'] + "/logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
-python=config['results_dir'] + config['run'] + "/logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_python.log",
-memory=config['results_dir'] + config['run'] + "/logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_memory.log"
-benchmark: config['results_dir'] + config['run'] + "/benchmarks/solve_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
+solver=config['results_dir'] + config['run'] + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
+python=config['results_dir'] + config['run'] + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_python.log",
+memory=config['results_dir'] + config['run'] + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_memory.log"
+benchmark: config['results_dir'] + config['run'] + "/benchmarks/solve_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
 threads: 4
 resources: mem_mb=config['solving']['mem']
 # group: "solve" # with group, threads is ignored https://bitbucket.org/snakemake/snakemake/issues/971/group-job-description-does-not-contain
@@ -432,15 +432,15 @@ if config["foresight"] == "myopic":

 rule add_existing_baseyear:
 input:
-network=config['results_dir'] + config['run'] + '/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc',
+network=config['results_dir'] + config['run'] + '/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc',
 powerplants=pypsaeur('resources/powerplants.csv'),
-busmap_s=pypsaeur("resources/busmap_{network}_s{simpl}.csv"),
-busmap=pypsaeur("resources/busmap_{network}_s{simpl}_{clusters}.csv"),
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv",
+busmap_s=pypsaeur("resources/busmap_elec_s{simpl}.csv"),
+busmap=pypsaeur("resources/busmap_elec_s{simpl}_{clusters}.csv"),
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
 costs=config['costs_dir'] + "costs_{}.csv".format(config['scenario']['planning_horizons'][0]),
-cop_soil_total="resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc",
-cop_air_total="resources/cop_air_total_{network}_s{simpl}_{clusters}.nc"
-output: config['results_dir'] + config['run'] + '/prenetworks-brownfield/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc'
+cop_soil_total="resources/cop_soil_total_elec_s{simpl}_{clusters}.nc",
+cop_air_total="resources/cop_air_total_elec_s{simpl}_{clusters}.nc"
+output: config['results_dir'] + config['run'] + '/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc'
 wildcard_constraints:
 planning_horizons=config['scenario']['planning_horizons'][0] #only applies to baseyear
 threads: 1
@@ -449,18 +449,18 @@ if config["foresight"] == "myopic":

 def process_input(wildcards):
 i = config["scenario"]["planning_horizons"].index(int(wildcards.planning_horizons))
-return config['results_dir'] + config['run'] + "/postnetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + str(config["scenario"]["planning_horizons"][i-1]) + ".nc"
+return config['results_dir'] + config['run'] + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_" + str(config["scenario"]["planning_horizons"][i-1]) + ".nc"

 rule add_brownfield:
 input:
-network=config['results_dir'] + config['run'] + '/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc',
+network=config['results_dir'] + config['run'] + '/prenetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc',
 network_p=process_input, #solved network at previous time step
 costs=config['costs_dir'] + "costs_{planning_horizons}.csv",
-cop_soil_total="resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc",
-cop_air_total="resources/cop_air_total_{network}_s{simpl}_{clusters}.nc"
-output: config['results_dir'] + config['run'] + "/prenetworks-brownfield/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
+cop_soil_total="resources/cop_soil_total_elec_s{simpl}_{clusters}.nc",
+cop_air_total="resources/cop_air_total_elec_s{simpl}_{clusters}.nc"
+output: config['results_dir'] + config['run'] + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
 threads: 4
 resources: mem_mb=10000
 script: "scripts/add_brownfield.py"
@@ -469,16 +469,16 @@ if config["foresight"] == "myopic":

 rule solve_network_myopic:
 input:
-network=config['results_dir'] + config['run'] + "/prenetworks-brownfield/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
+network=config['results_dir'] + config['run'] + "/prenetworks-brownfield/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc",
 costs=config['costs_dir'] + "costs_{planning_horizons}.csv",
 config=config['summary_dir'] + '/' + config['run'] + '/configs/config.yaml'
-output: config['results_dir'] + config['run'] + "/postnetworks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
+output: config['results_dir'] + config['run'] + "/postnetworks/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}.nc"
 shadow: "shallow"
 log:
-solver=config['results_dir'] + config['run'] + "/logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
-python=config['results_dir'] + config['run'] + "/logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_python.log",
-memory=config['results_dir'] + config['run'] + "/logs/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_memory.log"
-benchmark: config['results_dir'] + config['run'] + "/benchmarks/solve_network/{network}_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
+solver=config['results_dir'] + config['run'] + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_solver.log",
+python=config['results_dir'] + config['run'] + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_python.log",
+memory=config['results_dir'] + config['run'] + "/logs/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}_memory.log"
+benchmark: config['results_dir'] + config['run'] + "/benchmarks/solve_network/elec_s{simpl}_{clusters}_lv{lv}_{opts}_{sector_opts}_{planning_horizons}"
 threads: 4
 resources: mem_mb=config['solving']['mem']
 script: "scripts/solve_network.py"


@@ -375,6 +375,7 @@ plotting:
 "process emissions to atmosphere" : "#888888"
 "process emissions" : "#222222"
 "oil emissions" : "#666666"
+"land transport oil emissions" : "#666666"
 "land transport fuel cell" : "#AAAAAA"
 "biogas" : "#800000"
 "solid biomass" : "#DAA520"


@@ -9,6 +9,7 @@ Future release
 * The cost database for retrofitting of the thermal envelope of buildings has been updated. Now, for calculating the space heat savings of a building, losses by thermal bridges and ventilation are included as well as heat gains (internal and by solar radiation). See the section :ref:`retro` for more details on the retrofitting module.
 * Added an option to alter the capital cost or maximum capacity of carriers by a factor via ``carrier+factor`` in the ``{sector_opts}`` wildcard. This can be useful for exploring uncertain cost parameters. Example: ``solar+c0.5`` reduces the ``capital_cost`` of solar to 50\% of original values. Similarly ``solar+p3`` multiplies the ``p_nom_max`` by 3.
 * Rename the bus for European liquid hydrocarbons from ``Fischer-Tropsch`` to ``EU oil``, since it can be supplied not just with the Fischer-Tropsch process, but also with fossil oil.
+* Bugfix: The new separation of land transport by carrier in Version 0.4.0 failed to account for the carbon dioxide emissions from internal combustion engines. This is now treated as a negative load on the atmospheric carbon dioxide bus, just like aviation emissions.
 * Bugfix: Fix reading in of ``pypsa-eur/resources/powerplants.csv`` to PyPSA-Eur Version 0.3.0 (use column attribute name ``DateIn`` instead of old ``YearDecommissioned``).
 * Bugfix: Make sure that ``Store`` components (battery and H2) are also removed from PyPSA-Eur, so they can be added later by PyPSA-Eur-Sec.
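Two of the release-note items above can be illustrated briefly. First, the ``carrier+factor`` option is only a string convention inside the ``{sector_opts}`` wildcard; below is a minimal sketch of how such a token could be parsed and applied, assuming a PyPSA network ``n`` whose ``generators`` table carries ``carrier``, ``capital_cost`` and ``p_nom_max`` columns (the helper name and parsing details are illustrative, not the repository's actual implementation):

    def apply_carrier_option(n, opt):
        """Parse an illustrative 'carrier+factor' token, e.g. 'solar+c0.5' or 'solar+p3'."""
        carrier, modifier = opt.split("+")                  # e.g. "solar", "c0.5"
        # 'c' scales capital_cost, 'p' scales p_nom_max, matching the convention above
        attr = {"c": "capital_cost", "p": "p_nom_max"}[modifier[0]]
        factor = float(modifier[1:])                        # e.g. 0.5 or 3.0
        n.generators.loc[n.generators.carrier == carrier, attr] *= factor

    # e.g. apply_carrier_option(n, "solar+c0.5") halves the capital cost of solar generators

Second, the land-transport bugfix represents internal combustion engine emissions as a negative load on the atmospheric CO2 bus. Under the same assumption of a PyPSA network with a ``co2 atmosphere`` bus, the idea can be sketched as follows (the emission rate is a placeholder; in practice it would be derived from the transport demand and the oil CO2 intensity):

    co2_per_hour = 100.0  # tCO2/h, placeholder for the ICE-share emission rate
    n.add("Load", "land transport oil emissions",
          bus="co2 atmosphere",
          p_set=-co2_per_hour)  # a negative load injects CO2 into the atmosphere bus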


@@ -90,12 +90,12 @@ if __name__ == "__main__":
 sector_opts='Co2L0-168H-T-H-B-I-solar3-dist1',
 co2_budget_name='go',
 planning_horizons='2030'),
-input=dict(network='pypsa-eur-sec/results/test/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{co2_budget_name}_{planning_horizons}.nc',
-network_p='pypsa-eur-sec/results/test/postnetworks/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{co2_budget_name}_2020.nc',
+input=dict(network='pypsa-eur-sec/results/test/prenetworks/elec_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{co2_budget_name}_{planning_horizons}.nc',
+network_p='pypsa-eur-sec/results/test/postnetworks/elec_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{co2_budget_name}_2020.nc',
 costs='pypsa-eur-sec/data/costs/costs_{planning_horizons}.csv',
-cop_air_total="pypsa-eur-sec/resources/cop_air_total_{network}_s{simpl}_{clusters}.nc",
-cop_soil_total="pypsa-eur-sec/resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc"),
-output=['pypsa-eur-sec/results/test/prenetworks_brownfield/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc']
+cop_air_total="pypsa-eur-sec/resources/cop_air_total_elec_s{simpl}_{clusters}.nc",
+cop_soil_total="pypsa-eur-sec/resources/cop_soil_total_elec_s{simpl}_{clusters}.nc"),
+output=['pypsa-eur-sec/results/test/prenetworks_brownfield/elec_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc']
 )
 import yaml
 with open('config.yaml', encoding='utf8') as f:


@@ -411,15 +411,15 @@ if __name__ == "__main__":
 wildcards=dict(network='elec', simpl='', clusters='45', lv='1.0',
 sector_opts='Co2L0-3H-T-H-B-I-solar3-dist1',
 planning_horizons='2020'),
-input=dict(network='pypsa-eur-sec/results/version-2/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc',
+input=dict(network='pypsa-eur-sec/results/version-2/prenetworks/elec_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc',
 powerplants='pypsa-eur/resources/powerplants.csv',
-busmap_s='pypsa-eur/resources/busmap_{network}_s{simpl}.csv',
-busmap='pypsa-eur/resources/busmap_{network}_s{simpl}_{clusters}.csv',
+busmap_s='pypsa-eur/resources/busmap_elec_s{simpl}.csv',
+busmap='pypsa-eur/resources/busmap_elec_s{simpl}_{clusters}.csv',
 costs='technology_data/outputs/costs_{planning_horizons}.csv',
-cop_air_total="pypsa-eur-sec/resources/cop_air_total_{network}_s{simpl}_{clusters}.nc",
-cop_soil_total="pypsa-eur-sec/resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc",
-clustered_pop_layout="pypsa-eur-sec/resources/pop_layout_{network}_s{simpl}_{clusters}.csv",),
-output=['pypsa-eur-sec/results/version-2/prenetworks_brownfield/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc'],
+cop_air_total="pypsa-eur-sec/resources/cop_air_total_elec_s{simpl}_{clusters}.nc",
+cop_soil_total="pypsa-eur-sec/resources/cop_soil_total_elec_s{simpl}_{clusters}.nc",
+clustered_pop_layout="pypsa-eur-sec/resources/pop_layout_elec_s{simpl}_{clusters}.csv",),
+output=['pypsa-eur-sec/results/version-2/prenetworks_brownfield/elec_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc'],
 )
 import yaml
 with open('config.yaml', encoding='utf8') as f:


@@ -1,4 +1,3 @@
 import pandas as pd
 import geopandas as gpd
@@ -51,7 +50,6 @@ country_to_code = {
 'Switzerland' : 'CH',
 }
 non_EU = ['NO', 'CH', 'ME', 'MK', 'RS', 'BA', 'AL']
 rename = {"GR" : "EL",
@@ -73,7 +71,6 @@ def build_eurostat(year):
 fns = {2016: "data/eurostat-energy_balances-june_2016_edition/{year}-Energy-Balances-June2016edition.xlsx",
 2017: "data/eurostat-energy_balances-june_2017_edition/{year}-ENERGY-BALANCES-June2017edition.xlsx"}
 #2016 includes BA, 2017 doesn't
 #with sheet as None, an ordered dictionary of all sheets is returned
@@ -82,7 +79,6 @@ def build_eurostat(year):
 skiprows=1,
 index_col=list(range(4)))
 #sorted_index necessary for slicing
 df = pd.concat({country_to_code[df.columns[0]] : df for ct,df in dfs.items()},sort=True).sort_index()
@@ -91,15 +87,12 @@ def build_eurostat(year):
 def build_swiss(year):
 fn = "data/switzerland-sfoe/switzerland-new_format.csv"
 #convert PJ/a to TWh/a
 return (pd.read_csv(fn,index_col=list(range(2)))/3.6).loc["CH",str(year)]
 def build_idees(year):
 base_dir = "data/jrc-idees-2015"
@@ -275,7 +268,7 @@ def build_idees(year):
 return totals
-def build_energy_totals():
+def build_energy_totals(eurostat, swiss, idees):
 clean_df = idees.reindex(population.index).drop(["passenger cars","passenger car efficiency"],axis=1)
@@ -316,7 +309,6 @@ def build_energy_totals():
 + avg*(clean_df.loc[missing_in_eurostat,"{} {}".format("total",sector)] - clean_df.loc[missing_in_eurostat,"{} {}".format("electricity",sector)])
 #Fix Norway space and water heating fractions
 #http://www.ssb.no/en/energi-og-industri/statistikker/husenergi/hvert-3-aar/2014-07-14
 #The main heating source for about 73 per cent of the households is based on electricity
@@ -458,14 +450,12 @@ def build_eurostat_co2(year=1990):
 #Residual oil (No. 6) 0.298
 #https://www.eia.gov/electricity/annual/html/epa_a_03.html
 eurostat_co2 = eurostat_for_co2.multiply(se).sum(axis=1)
 return eurostat_co2
-def build_co2_totals(eea_co2, eurostat_co2, year=1990):
+def build_co2_totals(eea_co2, eurostat_co2):
 co2 = eea_co2.reindex(["EU28","NO","CH","BA","RS","AL","ME","MK"] + eu28)
@@ -530,7 +520,6 @@ def build_transport_data():
 if __name__ == "__main__":
 # Detect running outside of snakemake and mock snakemake for testing
 if 'snakemake' not in globals():
 from vresutils import Dict
@@ -546,21 +535,19 @@ if __name__ == "__main__":
 nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index')
 population = nuts3['pop'].groupby(nuts3.country).sum()
-year = 2011
-eurostat = build_eurostat(year)
-swiss = build_swiss(year)
-idees = build_idees(year)
-build_energy_totals()
-eea_co2 = build_eea_co2()
-eurostat_co2 = build_eurostat_co2()
-co2=build_co2_totals(eea_co2, eurostat_co2, year)
+data_year = 2011
+eurostat = build_eurostat(data_year)
+swiss = build_swiss(data_year)
+idees = build_idees(data_year)
+build_energy_totals(eurostat, swiss, idees)
+base_year_emissions = 1990
+eea_co2 = build_eea_co2(base_year_emissions)
+eurostat_co2 = build_eurostat_co2(base_year_emissions)
+co2 = build_co2_totals(eea_co2, eurostat_co2)
 co2.to_csv(snakemake.output.co2_name)
 build_transport_data()


@@ -11,7 +11,7 @@ if 'snakemake' not in globals():
 import yaml
 snakemake = Dict()
 with open('config.yaml') as f:
-snakemake.config = yaml.load(f)
+snakemake.config = yaml.safe_load(f)
 snakemake.input = Dict()
 snakemake.output = Dict()


@@ -98,7 +98,7 @@ for ct in eu28:
 for fuel in fuels:
 summary.at[fuel,sub] = s[fuels[fuel]].sum()
-summary.at['other',sub] = summary.at['all',sub] - summary.loc[summary.index^['all','other'],sub].sum()
+summary.at['other',sub] = summary.at['all',sub] - summary.loc[summary.index.symmetric_difference(['all','other']),sub].sum()
 summary['Other Industrial Sectors'] = summary[ois_subs].sum(axis=1)
 summary.drop(columns=ois_subs,inplace=True)
@@ -128,7 +128,7 @@ output = pd.read_csv(snakemake.input.industrial_production_per_country,
 eu28_averages = final_summary.groupby(level=1,axis=1).sum().divide(output.loc[eu28].sum(),axis=1)
-non_eu28 = output.index^eu28
+non_eu28 = output.index.symmetric_difference(eu28)
 for ct in non_eu28:
 print(ct)


@@ -196,7 +196,7 @@ ammonia = pd.read_csv(snakemake.input.ammonia_production,
 index_col=0)
 there = ammonia.index.intersection(countries_demand.index)
-missing = countries_demand.index^there
+missing = countries_demand.index.symmetric_difference(there)
 print("Following countries have no ammonia demand:", missing)


@@ -15,7 +15,7 @@ if 'snakemake' not in globals():
 import yaml
 snakemake = Dict()
 with open('config.yaml') as f:
-snakemake.config = yaml.load(f)
+snakemake.config = yaml.safe_load(f)
 snakemake.input = Dict()
 snakemake.output = Dict()
@@ -46,7 +46,7 @@ urban_fraction = pd.read_csv(snakemake.input.urban_percent,
 #fill missing Balkans values
 missing = ["AL","ME","MK"]
 reference = ["RS","BA"]
-urban_fraction = urban_fraction.reindex(urban_fraction.index|missing)
+urban_fraction = urban_fraction.reindex(urban_fraction.index.union(missing))
 urban_fraction.loc[missing] = urban_fraction[reference].mean()


@@ -774,6 +774,7 @@ def sample_dE_costs_area(area, area_tot, costs, dE_space, countries,
 .from_product([[ct], cost_dE.index.levels[1]])))
 cost_dE = cost_dE.append(averaged_data)
+# weights costs after construction index
 if construction_index:
 for ct in list(map_for_missings.keys() - cost_w.index):
@@ -844,12 +845,12 @@ if __name__ == "__main__":
 tax_w="data/retro/electricity_taxes_eu.csv",
 construction_index="data/retro/comparative_level_investment.csv",
 floor_area_missing="data/retro/floor_area_missing.csv",
-clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv",
+clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
 cost_germany="data/retro/retro_cost_germany.csv",
 window_assumptions="data/retro/window_assumptions.csv"),
 output=dict(
-retro_cost="resources/retro_cost_{network}_s{simpl}_{clusters}.csv",
-floor_area="resources/floor_area_{network}_s{simpl}_{clusters}.csv")
+retro_cost="resources/retro_cost_elec_s{simpl}_{clusters}.csv",
+floor_area="resources/floor_area_elec_s{simpl}_{clusters}.csv")
 )
 with open('config.yaml', encoding='utf8') as f:
 snakemake.config = yaml.safe_load(f)


@@ -11,7 +11,7 @@ if 'snakemake' not in globals():
 import yaml
 snakemake = Dict()
 with open('config.yaml') as f:
-snakemake.config = yaml.load(f)
+snakemake.config = yaml.safe_load(f)
 snakemake.input = Dict()
 snakemake.output = Dict()


@@ -11,7 +11,7 @@ if 'snakemake' not in globals():
 import yaml
 snakemake = Dict()
 with open('config.yaml') as f:
-snakemake.config = yaml.load(f)
+snakemake.config = yaml.safe_load(f)
 snakemake.input = Dict()
 snakemake.output = Dict()


@@ -79,7 +79,7 @@ def calculate_nodal_cfs(n,label,nodal_cfs):
 cf_c = p_c/capacities_c
 index = pd.MultiIndex.from_tuples([(c.list_name,) + t for t in cf_c.index.to_list()])
-nodal_cfs = nodal_cfs.reindex(index|nodal_cfs.index)
+nodal_cfs = nodal_cfs.reindex(index.union(nodal_cfs.index))
 nodal_cfs.loc[index,label] = cf_c.values
 return nodal_cfs
@@ -106,7 +106,7 @@ def calculate_cfs(n,label,cfs):
 cf_c = pd.concat([cf_c], keys=[c.list_name])
-cfs = cfs.reindex(cf_c.index|cfs.index)
+cfs = cfs.reindex(cf_c.index.union(cfs.index))
 cfs.loc[cf_c.index,label] = cf_c
@@ -121,7 +121,7 @@ def calculate_nodal_costs(n,label,nodal_costs):
 c.df["capital_costs"] = c.df.capital_cost*c.df[opt_name.get(c.name,"p") + "_nom_opt"]
 capital_costs = c.df.groupby(["location","carrier"])["capital_costs"].sum()
 index = pd.MultiIndex.from_tuples([(c.list_name,"capital") + t for t in capital_costs.index.to_list()])
-nodal_costs = nodal_costs.reindex(index|nodal_costs.index)
+nodal_costs = nodal_costs.reindex(index.union(nodal_costs.index))
 nodal_costs.loc[index,label] = capital_costs.values
 if c.name == "Link":
@@ -143,7 +143,7 @@ def calculate_nodal_costs(n,label,nodal_costs):
 c.df["marginal_costs"] = p*c.df.marginal_cost
 marginal_costs = c.df.groupby(["location","carrier"])["marginal_costs"].sum()
 index = pd.MultiIndex.from_tuples([(c.list_name,"marginal") + t for t in marginal_costs.index.to_list()])
-nodal_costs = nodal_costs.reindex(index|nodal_costs.index)
+nodal_costs = nodal_costs.reindex(index.union(nodal_costs.index))
 nodal_costs.loc[index,label] = marginal_costs.values
 return nodal_costs
@@ -158,7 +158,7 @@ def calculate_costs(n,label,costs):
 capital_costs_grouped = pd.concat([capital_costs_grouped], keys=["capital"])
 capital_costs_grouped = pd.concat([capital_costs_grouped], keys=[c.list_name])
-costs = costs.reindex(capital_costs_grouped.index|costs.index)
+costs = costs.reindex(capital_costs_grouped.index.union(costs.index))
 costs.loc[capital_costs_grouped.index,label] = capital_costs_grouped
@@ -185,7 +185,7 @@ def calculate_costs(n,label,costs):
 marginal_costs_grouped = pd.concat([marginal_costs_grouped], keys=["marginal"])
 marginal_costs_grouped = pd.concat([marginal_costs_grouped], keys=[c.list_name])
-costs = costs.reindex(marginal_costs_grouped.index|costs.index)
+costs = costs.reindex(marginal_costs_grouped.index.union(costs.index))
 costs.loc[marginal_costs_grouped.index,label] = marginal_costs_grouped
@@ -220,7 +220,7 @@ def calculate_nodal_capacities(n,label,nodal_capacities):
 for c in n.iterate_components(n.branch_components|n.controllable_one_port_components^{"Load"}):
 nodal_capacities_c = c.df.groupby(["location","carrier"])[opt_name.get(c.name,"p") + "_nom_opt"].sum()
 index = pd.MultiIndex.from_tuples([(c.list_name,) + t for t in nodal_capacities_c.index.to_list()])
-nodal_capacities = nodal_capacities.reindex(index|nodal_capacities.index)
+nodal_capacities = nodal_capacities.reindex(index.union(nodal_capacities.index))
 nodal_capacities.loc[index,label] = nodal_capacities_c.values
 return nodal_capacities
@@ -234,7 +234,7 @@ def calculate_capacities(n,label,capacities):
 capacities_grouped = c.df[opt_name.get(c.name,"p") + "_nom_opt"].groupby(c.df.carrier).sum()
 capacities_grouped = pd.concat([capacities_grouped], keys=[c.list_name])
-capacities = capacities.reindex(capacities_grouped.index|capacities.index)
+capacities = capacities.reindex(capacities_grouped.index.union(capacities.index))
 capacities.loc[capacities_grouped.index,label] = capacities_grouped
@@ -267,7 +267,7 @@ def calculate_energy(n,label,energy):
 c_energies = pd.concat([c_energies], keys=[c.list_name])
-energy = energy.reindex(c_energies.index|energy.index)
+energy = energy.reindex(c_energies.index.union(energy.index))
 energy.loc[c_energies.index,label] = c_energies
@@ -294,7 +294,7 @@ def calculate_supply(n,label,supply):
 s = pd.concat([s], keys=[c.list_name])
 s = pd.concat([s], keys=[i])
-supply = supply.reindex(s.index|supply.index)
+supply = supply.reindex(s.index.union(supply.index))
 supply.loc[s.index,label] = s
@@ -313,7 +313,7 @@ def calculate_supply(n,label,supply):
 s = pd.concat([s], keys=[c.list_name])
 s = pd.concat([s], keys=[i])
-supply = supply.reindex(s.index|supply.index)
+supply = supply.reindex(s.index.union(supply.index))
 supply.loc[s.index,label] = s
 return supply
@@ -339,7 +339,7 @@ def calculate_supply_energy(n,label,supply_energy):
 s = pd.concat([s], keys=[c.list_name])
 s = pd.concat([s], keys=[i])
-supply_energy = supply_energy.reindex(s.index|supply_energy.index)
+supply_energy = supply_energy.reindex(s.index.union(supply_energy.index))
 supply_energy.loc[s.index,label] = s
@@ -357,7 +357,7 @@ def calculate_supply_energy(n,label,supply_energy):
 s = pd.concat([s], keys=[c.list_name])
 s = pd.concat([s], keys=[i])
-supply_energy = supply_energy.reindex(s.index|supply_energy.index)
+supply_energy = supply_energy.reindex(s.index.union(supply_energy.index))
 supply_energy.loc[s.index,label] = s
@@ -366,7 +366,7 @@ def calculate_supply_energy(n,label,supply_energy):
 def calculate_metrics(n,label,metrics):
-metrics = metrics.reindex(pd.Index(["line_volume","line_volume_limit","line_volume_AC","line_volume_DC","line_volume_shadow","co2_shadow"])|metrics.index)
+metrics = metrics.reindex(pd.Index(["line_volume","line_volume_limit","line_volume_AC","line_volume_DC","line_volume_shadow","co2_shadow"]).union(metrics.index))
 metrics.at["line_volume_DC",label] = (n.links.length*n.links.p_nom_opt)[n.links.carrier == "DC"].sum()
 metrics.at["line_volume_AC",label] = (n.lines.length*n.lines.s_nom_opt).sum()
@@ -384,7 +384,7 @@ def calculate_metrics(n,label,metrics):
 def calculate_prices(n,label,prices):
-prices = prices.reindex(prices.index|n.buses.carrier.unique())
+prices = prices.reindex(prices.index.union(n.buses.carrier.unique()))
 #WARNING: this is time-averaged, see weighted_prices for load-weighted average
 prices[label] = n.buses_t.marginal_price.mean().groupby(n.buses.carrier).mean()
@@ -467,7 +467,7 @@ def calculate_market_values(n, label, market_values):
 techs = n.generators.loc[generators,"carrier"].value_counts().index
-market_values = market_values.reindex(market_values.index | techs)
+market_values = market_values.reindex(market_values.index.union(techs))
 for tech in techs:
@@ -488,7 +488,7 @@ def calculate_market_values(n, label, market_values):
 techs = n.links.loc[all_links,"carrier"].value_counts().index
-market_values = market_values.reindex(market_values.index | techs)
+market_values = market_values.reindex(market_values.index.union(techs))
 for tech in techs:
 links = all_links[n.links.loc[all_links,"carrier"] == tech]
@@ -505,7 +505,7 @@ def calculate_market_values(n, label, market_values):
 def calculate_price_statistics(n, label, price_statistics):
-price_statistics = price_statistics.reindex(price_statistics.index|pd.Index(["zero_hours","mean","standard_deviation"]))
+price_statistics = price_statistics.reindex(price_statistics.index.union(pd.Index(["zero_hours","mean","standard_deviation"])))
 buses = n.buses.index[n.buses.carrier == "AC"]


@ -130,7 +130,7 @@ def plot_map(network, components=["links", "stores", "storage_units", "generator
costs.drop(list(costs.columns[(costs == 0.).all()]), axis=1, inplace=True) costs.drop(list(costs.columns[(costs == 0.).all()]), axis=1, inplace=True)
new_columns = ((preferred_order & costs.columns) new_columns = (preferred_order.intersection(costs.columns)
.append(costs.columns.difference(preferred_order))) .append(costs.columns.difference(preferred_order)))
costs = costs[new_columns] costs = costs[new_columns]
@ -147,7 +147,7 @@ def plot_map(network, components=["links", "stores", "storage_units", "generator
n.links.carrier != "B2B")], inplace=True) n.links.carrier != "B2B")], inplace=True)
# drop non-bus # drop non-bus
to_drop = costs.index.levels[0] ^ n.buses.index to_drop = costs.index.levels[0].symmetric_difference(n.buses.index)
if len(to_drop) != 0: if len(to_drop) != 0:
print("dropping non-buses", to_drop) print("dropping non-buses", to_drop)
costs.drop(to_drop, level=0, inplace=True, axis=0) costs.drop(to_drop, level=0, inplace=True, axis=0)
@ -463,7 +463,7 @@ def plot_series(network, carrier="AC", name="test"):
"battery storage", "battery storage",
"hot water storage"]) "hot water storage"])
new_columns = ((preferred_order & supply.columns) new_columns = (preferred_order.intersection(supply.columns)
.append(supply.columns.difference(preferred_order))) .append(supply.columns.difference(preferred_order)))
supply = supply.groupby(supply.columns, axis=1).sum() supply = supply.groupby(supply.columns, axis=1).sum()
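The preferred_order pattern above keeps a fixed plotting order for known carriers and appends anything unexpected at the end; the commit only swaps the deprecated & operator for .intersection(). A small sketch of the same idiom with placeholder column names:

    import pandas as pd

    preferred_order = pd.Index(["hot water storage", "battery storage", "solar PV"])
    columns = pd.Index(["battery storage", "wind", "solar PV"])

    # Known columns first, in the preferred order, then anything unlisted:
    new_columns = (preferred_order.intersection(columns)
                   .append(columns.difference(preferred_order)))
    print(list(new_columns))  # e.g. ['battery storage', 'solar PV', 'wind']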

View File

@ -82,7 +82,7 @@ def plot_costs():
print(df.sum()) print(df.sum())
new_index = (preferred_order&df.index).append(df.index.difference(preferred_order)) new_index = preferred_order.intersection(df.index).append(df.index.difference(preferred_order))
new_columns = df.sum().sort_values().index new_columns = df.sum().sort_values().index
@ -136,7 +136,7 @@ def plot_energy():
print(df) print(df)
new_index = (preferred_order&df.index).append(df.index.difference(preferred_order)) new_index = preferred_order.intersection(df.index).append(df.index.difference(preferred_order))
new_columns = df.columns.sort_values() new_columns = df.columns.sort_values()
#new_columns = df.sum().sort_values().index #new_columns = df.sum().sort_values().index
@ -177,7 +177,7 @@ def plot_balances():
balances_df = pd.read_csv(snakemake.input.balances,index_col=list(range(3)),header=list(range(n_header))) balances_df = pd.read_csv(snakemake.input.balances,index_col=list(range(3)),header=list(range(n_header)))
balances = {i.replace(" ","_") : [i] for i in balances_df.index.levels[0]} balances = {i.replace(" ","_") : [i] for i in balances_df.index.levels[0]}
balances["energy"] = balances_df.index.levels[0]^co2_carriers balances["energy"] = balances_df.index.levels[0].symmetric_difference(co2_carriers)
for k,v in balances.items(): for k,v in balances.items():
@ -205,7 +205,7 @@ def plot_balances():
if df.empty: if df.empty:
continue continue
new_index = (preferred_order&df.index).append(df.index.difference(preferred_order)) new_index = preferred_order.intersection(df.index).append(df.index.difference(preferred_order))
new_columns = df.columns.sort_values() new_columns = df.columns.sort_values()
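In plot_balances the symmetric difference separates the CO2 accounting carriers from the energy carriers: since the CO2 carriers are a subset of the top-level index, the result is simply every carrier that is not CO2 bookkeeping. A sketch with assumed carrier names (the actual co2_carriers list lives in the script):

    import pandas as pd

    co2_carriers = ["co2", "co2 stored", "process emissions"]   # assumed CO2 accounting carriers
    carriers = pd.Index(co2_carriers + ["AC", "H2", "gas"])      # assumed top-level balance index

    energy = carriers.symmetric_difference(co2_carriers)
    print(list(energy))  # ['AC', 'H2', 'gas']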

View File

@ -6,9 +6,8 @@ import pandas as pd
idx = pd.IndexSlice idx = pd.IndexSlice
import numpy as np import numpy as np
import scipy as sp
import xarray as xr import xarray as xr
import re, os import re, os, sys
from six import iteritems, string_types from six import iteritems, string_types
@ -50,22 +49,19 @@ override_component_attrs["Store"].loc["lifetime"] = ["float","years",np.nan,"lif
def co2_emissions_year(cts, opts, year): def co2_emissions_year(cts, opts, year):
""" """
calculate co2 emissions in one specific year (e.g. 1990 or 2018). Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
""" """
eea_co2 = build_eea_co2(year) eea_co2 = build_eea_co2(year)
#TODO: read Eurostat data from year>2014, this only affects the estimation of # TODO: read Eurostat data from year>2014, this only affects the estimation of
# CO2 emissions for "BA","RS","AL","ME","MK" # CO2 emissions for "BA","RS","AL","ME","MK"
if year > 2014: if year > 2014:
eurostat_co2 = build_eurostat_co2(year=2014) eurostat_co2 = build_eurostat_co2(year=2014)
else: else:
eurostat_co2 = build_eurostat_co2(year) eurostat_co2 = build_eurostat_co2(year)
co2_totals=build_co2_totals(eea_co2, eurostat_co2, year) co2_totals = build_co2_totals(eea_co2, eurostat_co2)
co2_emissions = co2_totals.loc[cts, "electricity"].sum() co2_emissions = co2_totals.loc[cts, "electricity"].sum()
@ -77,11 +73,11 @@ def co2_emissions_year(cts, opts, year):
co2_emissions += co2_totals.loc[cts, ["industrial non-elec","industrial processes", co2_emissions += co2_totals.loc[cts, ["industrial non-elec","industrial processes",
"domestic aviation","international aviation", "domestic aviation","international aviation",
"domestic navigation","international navigation"]].sum().sum() "domestic navigation","international navigation"]].sum().sum()
co2_emissions *=0.001 #MtCO2 to GtCO2
co2_emissions *= 0.001 # Convert MtCO2 to GtCO2
return co2_emissions return co2_emissions
def build_carbon_budget(o): def build_carbon_budget(o):
#distribute carbon budget following beta or exponential transition path #distribute carbon budget following beta or exponential transition path
if "be" in o: if "be" in o:
@ -244,7 +240,7 @@ def remove_elec_base_techs(n):
for c in n.iterate_components(snakemake.config["pypsa_eur"]): for c in n.iterate_components(snakemake.config["pypsa_eur"]):
to_keep = snakemake.config["pypsa_eur"][c.name] to_keep = snakemake.config["pypsa_eur"][c.name]
to_remove = pd.Index(c.df.carrier.unique())^to_keep to_remove = pd.Index(c.df.carrier.unique()).symmetric_difference(to_keep)
print("Removing",c.list_name,"with carrier",to_remove) print("Removing",c.list_name,"with carrier",to_remove)
names = c.df.index[c.df.carrier.isin(to_remove)] names = c.df.index[c.df.carrier.isin(to_remove)]
print(names) print(names)
@ -933,7 +929,7 @@ def add_storage(network):
# hydrogen stored overground # hydrogen stored overground
h2_capital_cost = costs.at["hydrogen storage tank", "fixed"] h2_capital_cost = costs.at["hydrogen storage tank", "fixed"]
nodes_overground = nodes ^ cavern_nodes.index nodes_overground = nodes.symmetric_difference(cavern_nodes.index)
network.madd("Store", network.madd("Store",
nodes_overground + " H2 Store", nodes_overground + " H2 Store",
@ -1165,6 +1161,13 @@ def add_land_transport(network):
carrier="land transport oil", carrier="land transport oil",
p_set=ice_share/options['transport_internal_combustion_efficiency']*transport[nodes]) p_set=ice_share/options['transport_internal_combustion_efficiency']*transport[nodes])
co2 = ice_share/options['transport_internal_combustion_efficiency']*transport[nodes].sum().sum()/8760.*costs.at["oil",'CO2 intensity']
network.madd("Load",
["land transport oil emissions"],
bus="co2 atmosphere",
carrier="land transport oil emissions",
p_set=-co2)
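The added load books the CO2 from internal-combustion land transport on the co2 atmosphere bus: the annual transport demand is scaled by the ICE share and divided by the engine efficiency to get oil use, converted to an average hourly rate, and multiplied by the oil CO2 intensity; the negative p_set makes the load inject into the bus, so emissions accumulate there. A rough sketch of the arithmetic with invented figures:

    # Invented figures, only to trace the units behind the p_set above:
    ice_share = 0.8          # share of land transport still running on oil (assumed)
    efficiency = 0.3         # transport_internal_combustion_efficiency (assumed value)
    annual_demand = 1.2e6    # MWh of final land-transport energy per year (invented)
    co2_intensity = 0.27     # tCO2 per MWh_th of oil (approximate)

    oil_use = ice_share / efficiency * annual_demand   # MWh_th of oil per year
    co2_per_hour = oil_use / 8760. * co2_intensity     # average tCO2 per hour
    print(round(co2_per_hour, 1))                      # ~98.6 with these numbers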
def add_heat(network): def add_heat(network):
@ -1488,7 +1491,7 @@ def create_nodes_for_heat_sector():
else: else:
nodes[sector + " urban decentral"] = pop_layout.index nodes[sector + " urban decentral"] = pop_layout.index
# for central nodes, residential and services are aggregated # for central nodes, residential and services are aggregated
nodes["urban central"] = pop_layout.index ^ nodes["residential urban decentral"] nodes["urban central"] = pop_layout.index.symmetric_difference(nodes["residential urban decentral"])
return nodes return nodes
@ -1874,7 +1877,7 @@ if __name__ == "__main__":
opts='', planning_horizons='2020', opts='', planning_horizons='2020',
sector_opts='120H-T-H-B-I-onwind+p3-dist1-cb48be3'), sector_opts='120H-T-H-B-I-onwind+p3-dist1-cb48be3'),
input=dict( network='../pypsa-eur/networks/{network}_s{simpl}_{clusters}_ec_lv{lv}_{opts}.nc', input=dict( network='../pypsa-eur/networks/elec_s{simpl}_{clusters}_ec_lv{lv}_{opts}.nc',
energy_totals_name='resources/energy_totals.csv', energy_totals_name='resources/energy_totals.csv',
co2_totals_name='resources/co2_totals.csv', co2_totals_name='resources/co2_totals.csv',
transport_name='resources/transport_data.csv', transport_name='resources/transport_data.csv',
@ -1886,34 +1889,33 @@ if __name__ == "__main__":
h2_cavern = "data/hydrogen_salt_cavern_potentials.csv", h2_cavern = "data/hydrogen_salt_cavern_potentials.csv",
profile_offwind_ac="../pypsa-eur/resources/profile_offwind-ac.nc", profile_offwind_ac="../pypsa-eur/resources/profile_offwind-ac.nc",
profile_offwind_dc="../pypsa-eur/resources/profile_offwind-dc.nc", profile_offwind_dc="../pypsa-eur/resources/profile_offwind-dc.nc",
busmap_s="../pypsa-eur/resources/busmap_{network}_s{simpl}.csv", busmap_s="../pypsa-eur/resources/busmap_elec_s{simpl}.csv",
busmap="../pypsa-eur/resources/busmap_{network}_s{simpl}_{clusters}.csv", busmap="../pypsa-eur/resources/busmap_elec_s{simpl}_{clusters}.csv",
clustered_pop_layout="resources/pop_layout_{network}_s{simpl}_{clusters}.csv", clustered_pop_layout="resources/pop_layout_elec_s{simpl}_{clusters}.csv",
simplified_pop_layout="resources/pop_layout_{network}_s{simpl}.csv", simplified_pop_layout="resources/pop_layout_elec_s{simpl}.csv",
industrial_demand="resources/industrial_energy_demand_{network}_s{simpl}_{clusters}.csv", industrial_demand="resources/industrial_energy_demand_elec_s{simpl}_{clusters}.csv",
heat_demand_urban="resources/heat_demand_urban_{network}_s{simpl}_{clusters}.nc", heat_demand_urban="resources/heat_demand_urban_elec_s{simpl}_{clusters}.nc",
heat_demand_rural="resources/heat_demand_rural_{network}_s{simpl}_{clusters}.nc", heat_demand_rural="resources/heat_demand_rural_elec_s{simpl}_{clusters}.nc",
heat_demand_total="resources/heat_demand_total_{network}_s{simpl}_{clusters}.nc", heat_demand_total="resources/heat_demand_total_elec_s{simpl}_{clusters}.nc",
temp_soil_total="resources/temp_soil_total_{network}_s{simpl}_{clusters}.nc", temp_soil_total="resources/temp_soil_total_elec_s{simpl}_{clusters}.nc",
temp_soil_rural="resources/temp_soil_rural_{network}_s{simpl}_{clusters}.nc", temp_soil_rural="resources/temp_soil_rural_elec_s{simpl}_{clusters}.nc",
temp_soil_urban="resources/temp_soil_urban_{network}_s{simpl}_{clusters}.nc", temp_soil_urban="resources/temp_soil_urban_elec_s{simpl}_{clusters}.nc",
temp_air_total="resources/temp_air_total_{network}_s{simpl}_{clusters}.nc", temp_air_total="resources/temp_air_total_elec_s{simpl}_{clusters}.nc",
temp_air_rural="resources/temp_air_rural_{network}_s{simpl}_{clusters}.nc", temp_air_rural="resources/temp_air_rural_elec_s{simpl}_{clusters}.nc",
temp_air_urban="resources/temp_air_urban_{network}_s{simpl}_{clusters}.nc", temp_air_urban="resources/temp_air_urban_elec_s{simpl}_{clusters}.nc",
cop_soil_total="resources/cop_soil_total_{network}_s{simpl}_{clusters}.nc", cop_soil_total="resources/cop_soil_total_elec_s{simpl}_{clusters}.nc",
cop_soil_rural="resources/cop_soil_rural_{network}_s{simpl}_{clusters}.nc", cop_soil_rural="resources/cop_soil_rural_elec_s{simpl}_{clusters}.nc",
cop_soil_urban="resources/cop_soil_urban_{network}_s{simpl}_{clusters}.nc", cop_soil_urban="resources/cop_soil_urban_elec_s{simpl}_{clusters}.nc",
cop_air_total="resources/cop_air_total_{network}_s{simpl}_{clusters}.nc", cop_air_total="resources/cop_air_total_elec_s{simpl}_{clusters}.nc",
cop_air_rural="resources/cop_air_rural_{network}_s{simpl}_{clusters}.nc", cop_air_rural="resources/cop_air_rural_elec_s{simpl}_{clusters}.nc",
cop_air_urban="resources/cop_air_urban_{network}_s{simpl}_{clusters}.nc", cop_air_urban="resources/cop_air_urban_elec_s{simpl}_{clusters}.nc",
solar_thermal_total="resources/solar_thermal_total_{network}_s{simpl}_{clusters}.nc", solar_thermal_total="resources/solar_thermal_total_elec_s{simpl}_{clusters}.nc",
solar_thermal_urban="resources/solar_thermal_urban_{network}_s{simpl}_{clusters}.nc", solar_thermal_urban="resources/solar_thermal_urban_elec_s{simpl}_{clusters}.nc",
solar_thermal_rural="resources/solar_thermal_rural_{network}_s{simpl}_{clusters}.nc", solar_thermal_rural="resources/solar_thermal_rural_elec_s{simpl}_{clusters}.nc",
retro_cost_energy = "resources/retro_cost_{network}_s{simpl}_{clusters}.csv", retro_cost_energy = "resources/retro_cost_elec_s{simpl}_{clusters}.csv",
floor_area = "resources/floor_area_{network}_s{simpl}_{clusters}.csv" floor_area = "resources/floor_area_elec_s{simpl}_{clusters}.csv"
), ),
output=['results/version-cb48be3/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc'] output=['results/version-cb48be3/prenetworks/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{planning_horizons}.nc']
) )
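The block above fakes the snakemake object so the script can be debugged outside the workflow, now with the elec_ prefix hard-coded in the input paths. If no mock helper is at hand, a bare namespace gives the same attribute-style access for quick experiments; this is only a sketch with invented, already-resolved paths, not the project's helper, which also resolves the remaining wildcards:

    from types import SimpleNamespace

    # Illustration only: supports snakemake.input.network style access,
    # with wildcards substituted by hand.
    snakemake = SimpleNamespace(
        wildcards=SimpleNamespace(simpl='', clusters='39', lv='1.0',
                                  opts='', planning_horizons='2020'),
        input=SimpleNamespace(
            network='../pypsa-eur/networks/elec_s_39_ec_lv1.0_.nc',
            energy_totals_name='resources/energy_totals.csv'),
        output=['results/prenetworks/elec_s_39_lv1.0_2020_test.nc'],
    )
    print(snakemake.input.network)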
import yaml import yaml
with open('config.yaml', encoding='utf8') as f: with open('config.yaml', encoding='utf8') as f:

View File

@ -376,10 +376,10 @@ if __name__ == "__main__":
wildcards=dict(network='elec', simpl='', clusters='39', lv='1.0', wildcards=dict(network='elec', simpl='', clusters='39', lv='1.0',
sector_opts='Co2L0-168H-T-H-B-I-solar3-dist1', sector_opts='Co2L0-168H-T-H-B-I-solar3-dist1',
co2_budget_name='b30b3', planning_horizons='2050'), co2_budget_name='b30b3', planning_horizons='2050'),
input=dict(network="pypsa-eur-sec/results/test/prenetworks_brownfield/{network}_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{co2_budget_name}_{planning_horizons}.nc"), input=dict(network="pypsa-eur-sec/results/test/prenetworks_brownfield/elec_s{simpl}_{clusters}_lv{lv}__{sector_opts}_{co2_budget_name}_{planning_horizons}.nc"),
output=["results/networks/s{simpl}_{clusters}_lv{lv}_{sector_opts}_{co2_budget_name}_{planning_horizons}-test.nc"], output=["results/networks/s{simpl}_{clusters}_lv{lv}_{sector_opts}_{co2_budget_name}_{planning_horizons}-test.nc"],
log=dict(gurobi="logs/{network}_s{simpl}_{clusters}_lv{lv}_{sector_opts}_{co2_budget_name}_{planning_horizons}_gurobi-test.log", log=dict(gurobi="logs/elec_s{simpl}_{clusters}_lv{lv}_{sector_opts}_{co2_budget_name}_{planning_horizons}_gurobi-test.log",
python="logs/{network}_s{simpl}_{clusters}_lv{lv}_{sector_opts}_{co2_budget_name}_{planning_horizons}_python-test.log") python="logs/elec_s{simpl}_{clusters}_lv{lv}_{sector_opts}_{co2_budget_name}_{planning_horizons}_python-test.log")
) )
import yaml import yaml
with open('config.yaml', encoding='utf8') as f: with open('config.yaml', encoding='utf8') as f: