Merge branch 'master' into describe-config

virio-andreyana 2023-07-02 23:58:55 +02:00 committed by GitHub
commit b2c4a5a4cb
48 changed files with 743 additions and 491 deletions

View File

@ -30,7 +30,7 @@ repos:
# Find common spelling mistakes in comments and docstrings
- repo: https://github.com/codespell-project/codespell
rev: v2.2.4
rev: v2.2.5
hooks:
- id: codespell
args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore'] # Ignore capital case words, e.g. country codes
@ -39,7 +39,7 @@ repos:
# Make docstrings PEP 257 compliant
- repo: https://github.com/PyCQA/docformatter
rev: v1.7.2
rev: v1.7.3
hooks:
- id: docformatter
args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
@ -87,6 +87,6 @@ repos:
# Check for FSFE REUSE compliance (licensing)
- repo: https://github.com/fsfe/reuse-tool
rev: v1.1.2
rev: v2.0.0
hooks:
- id: reuse

View File

@ -767,6 +767,11 @@ plotting:
solar: "#f9d002"
solar PV: "#f9d002"
solar thermal: '#ffbf2b'
residential rural solar thermal: '#f1c069'
services rural solar thermal: '#eabf61'
residential urban decentral solar thermal: '#e5bc5a'
services urban decentral solar thermal: '#dfb953'
urban central solar thermal: '#d7b24c'
solar rooftop: '#ffea80'
# gas
OCGT: '#e0986c'
@ -775,9 +780,15 @@ plotting:
gas boiler: '#db6a25'
gas boilers: '#db6a25'
gas boiler marginal: '#db6a25'
residential rural gas boiler: '#d4722e'
residential urban decentral gas boiler: '#cb7a36'
services rural gas boiler: '#c4813f'
services urban decentral gas boiler: '#ba8947'
urban central gas boiler: '#b0904f'
gas: '#e05b09'
fossil gas: '#e05b09'
natural gas: '#e05b09'
biogas to gas: '#e36311'
CCGT: '#a85522'
CCGT marginal: '#a85522'
allam: '#B98F76'
@ -790,6 +801,11 @@ plotting:
# oil
oil: '#c9c9c9'
oil boiler: '#adadad'
residential rural oil boiler: '#a9a9a9'
services rural oil boiler: '#a5a5a5'
residential urban decentral oil boiler: '#a1a1a1'
urban central oil boiler: '#9d9d9d'
services urban decentral oil boiler: '#999999'
agriculture machinery oil: '#949494'
shipping oil: "#808080"
land transport oil: '#afafaf'
@ -815,13 +831,20 @@ plotting:
solid biomass for industry CC: '#47411c'
solid biomass for industry co2 from atmosphere: '#736412'
solid biomass for industry co2 to stored: '#47411c'
urban central solid biomass CHP: '#9d9042'
urban central solid biomass CHP CC: '#6c5d28'
biomass boiler: '#8A9A5B'
residential rural biomass boiler: '#a1a066'
residential urban decentral biomass boiler: '#b0b87b'
services rural biomass boiler: '#c6cf98'
services urban decentral biomass boiler: '#dde5b5'
biomass to liquid: '#32CD32'
BioSNG: '#123456'
# power transmission
lines: '#6c9459'
transmission lines: '#6c9459'
electricity distribution grid: '#97ad8c'
low voltage: '#97ad8c'
# electricity demand
Electric load: '#110d63'
electric demand: '#110d63'
@ -832,24 +855,48 @@ plotting:
# battery + EVs
battery: '#ace37f'
battery storage: '#ace37f'
battery charger: '#88a75b'
battery discharger: '#5d4e29'
home battery: '#80c944'
home battery storage: '#80c944'
home battery charger: '#5e8032'
home battery discharger: '#3c5221'
BEV charger: '#baf238'
V2G: '#e5ffa8'
land transport EV: '#baf238'
Li ion: '#baf238'
# hot water storage
water tanks: '#e69487'
residential rural water tanks: '#f7b7a3'
services rural water tanks: '#f3afa3'
residential urban decentral water tanks: '#f2b2a3'
services urban decentral water tanks: '#f1b4a4'
urban central water tanks: '#e9977d'
hot water storage: '#e69487'
hot water charging: '#e69487'
hot water discharging: '#e69487'
hot water charging: '#e8998b'
urban central water tanks charger: '#b57a67'
residential rural water tanks charger: '#b4887c'
residential urban decentral water tanks charger: '#b39995'
services rural water tanks charger: '#b3abb0'
services urban decentral water tanks charger: '#b3becc'
hot water discharging: '#e99c8e'
urban central water tanks discharger: '#b9816e'
residential rural water tanks discharger: '#ba9685'
residential urban decentral water tanks discharger: '#baac9e'
services rural water tanks discharger: '#bbc2b8'
services urban decentral water tanks discharger: '#bdd8d3'
# heat demand
Heat load: '#cc1f1f'
heat: '#cc1f1f'
heat demand: '#cc1f1f'
rural heat: '#ff5c5c'
residential rural heat: '#ff7c7c'
services rural heat: '#ff9c9c'
central heat: '#cc1f1f'
urban central heat: '#d15959'
decentral heat: '#750606'
residential urban decentral heat: '#a33c3c'
services urban decentral heat: '#cc1f1f'
low-temperature heat for industry: '#8f2727'
process heat: '#ff0000'
agriculture heat: '#d9a5a5'
@ -857,14 +904,26 @@ plotting:
heat pumps: '#2fb537'
heat pump: '#2fb537'
air heat pump: '#36eb41'
residential urban decentral air heat pump: '#48f74f'
services urban decentral air heat pump: '#5af95d'
urban central air heat pump: '#6cfb6b'
ground heat pump: '#2fb537'
residential rural ground heat pump: '#48f74f'
services rural ground heat pump: '#5af95d'
Ambient: '#98eb9d'
CHP: '#8a5751'
urban central gas CHP: '#8d5e56'
CHP CC: '#634643'
urban central gas CHP CC: '#6e4e4c'
CHP heat: '#8a5751'
CHP electric: '#8a5751'
district heating: '#e8beac'
resistive heater: '#d8f9b8'
residential rural resistive heater: '#bef5b5'
residential urban decentral resistive heater: '#b2f1a9'
services rural resistive heater: '#a5ed9d'
services urban decentral resistive heater: '#98e991'
urban central resistive heater: '#8cdf85'
retrofitting: '#8487e8'
building retrofitting: '#8487e8'
# hydrogen
@ -876,13 +935,16 @@ plotting:
SMR CC: '#4f1745'
H2 liquefaction: '#d647bd'
hydrogen storage: '#bf13a0'
H2 Store: '#bf13a0'
H2 storage: '#bf13a0'
land transport fuel cell: '#6b3161'
H2 pipeline: '#f081dc'
H2 pipeline retrofitted: '#ba99b5'
H2 Fuel Cell: '#c251ae'
H2 fuel cell: '#c251ae'
H2 turbine: '#991f83'
H2 Electrolysis: '#ff29d9'
H2 electrolysis: '#ff29d9'
# ammonia
NH3: '#46caf0'
ammonia: '#46caf0'
@ -931,9 +993,11 @@ plotting:
waste: '#e3d37d'
other: '#000000'
geothermal: '#ba91b1'
AC: "#70af1d"
AC-AC: "#70af1d"
AC line: "#70af1d"
links: "#8a1caf"
HVDC links: "#8a1caf"
DC: "#8a1caf"
DC-DC: "#8a1caf"
DC link: "#8a1caf"

View File

@ -31,6 +31,14 @@ snapshots:
end: "2013-03-08"
electricity:
co2limit: 100.e+6
extendable_carriers:
Generator: [OCGT]
StorageUnit: [battery]
Store: [H2]
Link: [H2 pipeline]
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]
atlite:

View File

@ -28,6 +28,14 @@ snapshots:
end: "2013-03-08"
electricity:
co2limit: 100.e+6
extendable_carriers:
Generator: [OCGT]
StorageUnit: [battery]
Store: [H2]
Link: [H2 pipeline]
renewable_carriers: [solar, onwind, offwind-ac, offwind-dc]
atlite:

View File

@ -10,11 +10,14 @@ Release Notes
Upcoming Release
================
* ``params:`` sections in rule definitions are added to track changed settings in ``config.yaml``. The goal is to automatically re-execute rules whose parameters have changed. See `Non-file parameters for rules <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#non-file-parameters-for-rules>`_ in the Snakemake documentation. A minimal sketch of the pattern is given at the end of this list.
* **Important:** The configuration files are now located in the ``config`` directory. This applies to ``config.default.yaml`` and ``config.yaml`` as well as the test configuration files, which are now located in ``config/test``. Config files that remain in the root directory will be ignored.
* Bugfix: Correct typo in the CPLEX solver configuration in ``config.default.yaml``.
* Bugfix: Error in ``add_electricity`` where carriers were added multiple times to the network, resulting in a non-unique carriers error.
* Renamed the PyPSA-Eur scripts ``build_load_data`` to ``build_electricity_demand`` and ``retrieve_load_data`` to ``retrieve_electricity_demand``.
* Fix the Read the Docs documentation build.
@ -23,12 +26,17 @@ Upcoming Release
hydrogen fuel cell. Add switches for both re-electrification options under
``sector: hydrogen_turbine:`` and ``sector: hydrogen_fuel_cell:``.
* A new function named ``sanitize_carriers`` ensures that all unique carrier names are present in the network's carriers attribute, and adds nice names and colors for each carrier according to the provided configuration dictionary.
* Additional ``tech_colors`` are added to include previously unlisted carriers.
* Remove ``vresutils`` dependency.
* Add option to include a piecewise linear approximation of transmission losses,
e.g. by setting ``solving: options: transmission_losses: 2`` for an
approximation with two tangents.
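
  For illustration only, a minimal sketch of the new ``params:`` pattern (the rule, file names and config keys below are hypothetical, not part of this release): the rule declares the relevant config entries under ``params:`` and the script reads them from ``snakemake.params`` instead of ``snakemake.config``, so a change to those values marks the rule for re-execution.

  .. code:: python

      # rules/example.smk -- hypothetical rule exposing config entries as params
      rule build_example:
          params:
              countries=config["countries"],
              snapshots=config["snapshots"],
          input:
              "data/example_input.csv",
          output:
              "resources/example_output.csv",
          script:
              "../scripts/build_example.py"

      # scripts/build_example.py -- access the tracked values by name
      countries = snakemake.params.countries
      snapshots = snakemake.params.snapshots
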
PyPSA-Eur 0.8.0 (18th March 2023)
=================================

View File

@ -19,6 +19,10 @@ if config["enable"].get("prepare_links_p_nom", False):
rule build_electricity_demand:
params:
snapshots=config["snapshots"],
countries=config["countries"],
load=config["load"],
input:
ancient("data/load_raw.csv"),
output:
@ -34,6 +38,10 @@ rule build_electricity_demand:
rule build_powerplants:
params:
powerplants_filter=config["electricity"]["powerplants_filter"],
custom_powerplants=config["electricity"]["custom_powerplants"],
countries=config["countries"],
input:
base_network=RESOURCES + "networks/base.nc",
custom_powerplants="data/custom_powerplants.csv",
@ -51,6 +59,9 @@ rule build_powerplants:
rule base_network:
params:
countries=config["countries"],
snapshots=config["snapshots"],
input:
eg_buses="data/entsoegridkit/buses.csv",
eg_lines="data/entsoegridkit/lines.csv",
@ -79,6 +90,8 @@ rule base_network:
rule build_shapes:
params:
countries=config["countries"],
input:
naturalearth=ancient("data/bundle/naturalearth/ne_10m_admin_0_countries.shp"),
eez=ancient("data/bundle/eez/World_EEZ_v8_2014.shp"),
@ -104,6 +117,8 @@ rule build_shapes:
rule build_bus_regions:
params:
countries=config["countries"],
input:
country_shapes=RESOURCES + "country_shapes.geojson",
offshore_shapes=RESOURCES + "offshore_shapes.geojson",
@ -125,6 +140,9 @@ rule build_bus_regions:
if config["enable"].get("build_cutout", False):
rule build_cutout:
params:
snapshots=config["snapshots"],
cutouts=config["atlite"]["cutouts"],
input:
regions_onshore=RESOURCES + "regions_onshore.geojson",
regions_offshore=RESOURCES + "regions_offshore.geojson",
@ -186,6 +204,8 @@ rule build_ship_raster:
rule build_renewable_profiles:
params:
renewable=config["renewable"],
input:
base_network=RESOURCES + "networks/base.nc",
corine=ancient("data/bundle/corine/g250_clc06_V18_5.tif"),
@ -235,6 +255,9 @@ rule build_renewable_profiles:
rule build_hydro_profile:
params:
hydro=config["renewable"]["hydro"],
countries=config["countries"],
input:
country_shapes=RESOURCES + "country_shapes.geojson",
eia_hydro_generation="data/eia_hydro_annual_generation.csv",
@ -252,6 +275,14 @@ rule build_hydro_profile:
rule add_electricity:
params:
length_factor=config["lines"]["length_factor"],
scaling_factor=config["load"]["scaling_factor"],
countries=config["countries"],
renewable=config["renewable"],
electricity=config["electricity"],
conventional=config.get("conventional", {}),
costs=config["costs"],
input:
**{
f"profile_{tech}": RESOURCES + f"profile_{tech}.nc"
@ -287,6 +318,15 @@ rule add_electricity:
rule simplify_network:
params:
simplify_network=config["clustering"]["simplify_network"],
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
focus_weights=config.get("focus_weights", None),
renewable_carriers=config["electricity"]["renewable_carriers"],
max_hours=config["electricity"]["max_hours"],
length_factor=config["lines"]["length_factor"],
p_max_pu=config["links"].get("p_max_pu", 1.0),
costs=config["costs"],
input:
network=RESOURCES + "networks/elec.nc",
tech_costs=COSTS,
@ -312,6 +352,16 @@ rule simplify_network:
rule cluster_network:
params:
cluster_network=config["clustering"]["cluster_network"],
aggregation_strategies=config["clustering"].get("aggregation_strategies", {}),
custom_busmap=config["enable"].get("custom_busmap", False),
focus_weights=config.get("focus_weights", None),
renewable_carriers=config["electricity"]["renewable_carriers"],
conventional_carriers=config["electricity"].get("conventional_carriers", []),
max_hours=config["electricity"]["max_hours"],
length_factor=config["lines"]["length_factor"],
costs=config["costs"],
input:
network=RESOURCES + "networks/elec_s{simpl}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}.geojson",
@ -343,6 +393,10 @@ rule cluster_network:
rule add_extra_components:
params:
extendable_carriers=config["electricity"]["extendable_carriers"],
max_hours=config["electricity"]["max_hours"],
costs=config["costs"],
input:
network=RESOURCES + "networks/elec_s{simpl}_{clusters}.nc",
tech_costs=COSTS,
@ -362,6 +416,14 @@ rule add_extra_components:
rule prepare_network:
params:
links=config["links"],
lines=config["lines"],
co2base=config["electricity"]["co2base"],
co2limit=config["electricity"]["co2limit"],
gaslimit=config["electricity"].get("gaslimit"),
max_hours=config["electricity"]["max_hours"],
costs=config["costs"],
input:
RESOURCES + "networks/elec_s{simpl}_{clusters}_ec.nc",
tech_costs=COSTS,

View File

@ -140,6 +140,8 @@ if not (config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]):
rule build_heat_demands:
params:
snapshots=config["snapshots"],
input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -160,6 +162,8 @@ rule build_heat_demands:
rule build_temperature_profiles:
params:
snapshots=config["snapshots"],
input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -181,6 +185,8 @@ rule build_temperature_profiles:
rule build_cop_profiles:
params:
heat_pump_sink_T=config["sector"]["heat_pump_sink_T"],
input:
temp_soil_total=RESOURCES + "temp_soil_total_elec_s{simpl}_{clusters}.nc",
temp_soil_rural=RESOURCES + "temp_soil_rural_elec_s{simpl}_{clusters}.nc",
@ -208,6 +214,9 @@ rule build_cop_profiles:
rule build_solar_thermal_profiles:
params:
snapshots=config["snapshots"],
solar_thermal=config["solar_thermal"],
input:
pop_layout=RESOURCES + "pop_layout_{scope}.nc",
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
@ -228,6 +237,9 @@ rule build_solar_thermal_profiles:
rule build_energy_totals:
params:
countries=config["countries"],
energy=config["energy"],
input:
nuts3_shapes=RESOURCES + "nuts3_shapes.geojson",
co2="data/eea/UNFCCC_v23.csv",
@ -253,6 +265,8 @@ rule build_energy_totals:
rule build_biomass_potentials:
params:
biomass=config["biomass"],
input:
enspreso_biomass=HTTP.remote(
"https://cidportal.jrc.ec.europa.eu/ftp/jrc-opendata/ENSPRESO/ENSPRESO_BIOMASS.xlsx",
@ -315,6 +329,10 @@ if not config["sector"]["biomass_transport"]:
if config["sector"]["regional_co2_sequestration_potential"]["enable"]:
rule build_sequestration_potentials:
params:
sequestration_potential=config["sector"][
"regional_co2_sequestration_potential"
],
input:
sequestration_potential=HTTP.remote(
"https://raw.githubusercontent.com/ericzhou571/Co2Storage/main/resources/complete_map_2020_unit_Mt.geojson",
@ -368,6 +386,8 @@ rule build_salt_cavern_potentials:
rule build_ammonia_production:
params:
countries=config["countries"],
input:
usgs="data/myb1-2017-nitro.xls",
output:
@ -386,6 +406,9 @@ rule build_ammonia_production:
rule build_industry_sector_ratios:
params:
industry=config["industry"],
ammonia=config["sector"].get("ammonia", False),
input:
ammonia_production=RESOURCES + "ammonia_production.csv",
idees="data/jrc-idees-2015",
@ -405,6 +428,9 @@ rule build_industry_sector_ratios:
rule build_industrial_production_per_country:
params:
industry=config["industry"],
countries=config["countries"],
input:
ammonia_production=RESOURCES + "ammonia_production.csv",
jrc="data/jrc-idees-2015",
@ -426,6 +452,8 @@ rule build_industrial_production_per_country:
rule build_industrial_production_per_country_tomorrow:
params:
industry=config["industry"],
input:
industrial_production_per_country=RESOURCES
+ "industrial_production_per_country.csv",
@ -450,6 +478,9 @@ rule build_industrial_production_per_country_tomorrow:
rule build_industrial_distribution_key:
params:
hotmaps_locate_missing=config["industry"].get("hotmaps_locate_missing", False),
countries=config["countries"],
input:
regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson",
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
@ -524,6 +555,9 @@ rule build_industrial_energy_demand_per_node:
rule build_industrial_energy_demand_per_country_today:
params:
countries=config["countries"],
industry=config["industry"],
input:
jrc="data/jrc-idees-2015",
ammonia_production=RESOURCES + "ammonia_production.csv",
@ -570,6 +604,9 @@ rule build_industrial_energy_demand_per_node_today:
if config["sector"]["retrofitting"]["retro_endogen"]:
rule build_retro_cost:
params:
retrofitting=config["sector"]["retrofitting"],
countries=config["countries"],
input:
building_stock="data/retro/data_building_stock.csv",
data_tabula="data/retro/tabula-calculator-calcsetbuilding.csv",
@ -640,6 +677,9 @@ rule build_shipping_demand:
rule build_transport_demand:
params:
snapshots=config["snapshots"],
sector=config["sector"],
input:
clustered_pop_layout=RESOURCES + "pop_layout_elec_s{simpl}_{clusters}.csv",
pop_weighted_energy_totals=RESOURCES
@ -666,6 +706,18 @@ rule build_transport_demand:
rule prepare_sector_network:
params:
co2_budget=config["co2_budget"],
conventional_carriers=config["existing_capacities"]["conventional_carriers"],
foresight=config["foresight"],
costs=config["costs"],
sector=config["sector"],
industry=config["industry"],
pypsa_eur=config["pypsa_eur"],
length_factor=config["lines"]["length_factor"],
planning_horizons=config["scenario"]["planning_horizons"],
countries=config["countries"],
emissions_scope=config["energy"]["emissions"],
eurostat_report_year=config["energy"]["eurostat_report_year"],
RDIR=RDIR,
input:
**build_retro_cost_output,

View File

@ -9,6 +9,9 @@ localrules:
rule plot_network:
params:
foresight=config["foresight"],
plotting=config["plotting"],
input:
overrides="data/override_component_attrs",
network=RESULTS
@ -67,6 +70,10 @@ rule copy_conda_env:
rule make_summary:
params:
foresight=config["foresight"],
costs=config["costs"],
snapshots=config["snapshots"],
scenario=config["scenario"],
RDIR=RDIR,
input:
overrides="data/override_component_attrs",
@ -114,6 +121,10 @@ rule make_summary:
rule plot_summary:
params:
countries=config["countries"],
planning_horizons=config["scenario"]["planning_horizons"],
sector_opts=config["scenario"]["sector_opts"],
plotting=config["plotting"],
RDIR=RDIR,
input:
costs=RESULTS + "csvs/costs.csv",

View File

@ -4,6 +4,13 @@
rule solve_network:
params:
solving=config["solving"],
foresight=config["foresight"],
planning_horizons=config["scenario"]["planning_horizons"],
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
input:
network=RESOURCES + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output:
@ -28,6 +35,8 @@ rule solve_network:
rule solve_operations_network:
params:
options=config["solving"]["options"],
input:
network=RESULTS + "networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
output:

View File

@ -4,6 +4,11 @@
rule add_existing_baseyear:
params:
baseyear=config["scenario"]["planning_horizons"][0],
sector=config["sector"],
existing_capacities=config["existing_capacities"],
costs=config["costs"],
input:
overrides="data/override_component_attrs",
network=RESULTS
@ -42,6 +47,10 @@ rule add_existing_baseyear:
rule add_brownfield:
params:
H2_retrofit=config["sector"]["H2_retrofit"],
H2_retrofit_capacity_per_CH4=config["sector"]["H2_retrofit_capacity_per_CH4"],
threshold_capacity=config["existing_capacities"]["threshold_capacity"],
input:
overrides="data/override_component_attrs",
network=RESULTS
@ -74,6 +83,13 @@ ruleorder: add_existing_baseyear > add_brownfield
rule solve_sector_network_myopic:
params:
solving=config["solving"],
foresight=config["foresight"],
planning_horizons=config["scenario"]["planning_horizons"],
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
input:
overrides="data/override_component_attrs",
network=RESULTS

View File

@ -4,6 +4,13 @@
rule solve_sector_network:
params:
solving=config["solving"],
foresight=config["foresight"],
planning_horizons=config["scenario"]["planning_horizons"],
co2_sequestration_potential=config["sector"].get(
"co2_sequestration_potential", 200
),
input:
overrides="data/override_component_attrs",
network=RESULTS

View File

@ -82,7 +82,7 @@ def load_network(import_name=None, custom_components=None):
As in pypsa.Network(import_name)
custom_components : dict
Dictionary listing custom components.
For using ``snakemake.config['override_components']``
For using ``snakemake.params['override_components']``
in ``config/config.yaml`` define:
.. code:: yaml
@ -283,7 +283,7 @@ def get_aggregation_strategies(aggregation_strategies):
# when custom values are specified in the config.
import numpy as np
from pypsa.networkclustering import _make_consense
from pypsa.clustering.spatial import _make_consense
bus_strategies = dict(country=_make_consense("Bus", "country"))
bus_strategies.update(aggregation_strategies.get("buses", {}))

View File

@ -49,7 +49,7 @@ def add_brownfield(n, n_p, year):
)
]
threshold = snakemake.config["existing_capacities"]["threshold_capacity"]
threshold = snakemake.params.threshold_capacity
if not chp_heat.empty:
threshold_chp_heat = (
@ -87,7 +87,7 @@ def add_brownfield(n, n_p, year):
# deal with gas network
pipe_carrier = ["gas pipeline"]
if snakemake.config["sector"]["H2_retrofit"]:
if snakemake.params.H2_retrofit:
# drop capacities of previous year to avoid duplicating
to_drop = n.links.carrier.isin(pipe_carrier) & (n.links.build_year != year)
n.mremove("Link", n.links.loc[to_drop].index)
@ -98,7 +98,7 @@ def add_brownfield(n, n_p, year):
& (n.links.build_year != year)
].index
gas_pipes_i = n.links[n.links.carrier.isin(pipe_carrier)].index
CH4_per_H2 = 1 / snakemake.config["sector"]["H2_retrofit_capacity_per_CH4"]
CH4_per_H2 = 1 / snakemake.params.H2_retrofit_capacity_per_CH4
fr = "H2 pipeline retrofitted"
to = "gas pipeline"
# today's pipe capacity

View File

@ -123,21 +123,71 @@ def calculate_annuity(n, r):
return 1 / n
def _add_missing_carriers_from_costs(n, costs, carriers):
missing_carriers = pd.Index(carriers).difference(n.carriers.index)
if missing_carriers.empty:
return
def add_missing_carriers(n, carriers):
"""
Function to add missing carriers to the network without raising errors.
"""
missing_carriers = set(carriers) - set(n.carriers.index)
if len(missing_carriers) > 0:
n.madd("Carrier", missing_carriers)
emissions_cols = (
costs.columns.to_series().loc[lambda s: s.str.endswith("_emissions")].values
def sanitize_carriers(n, config):
"""
Sanitize the carrier information in a PyPSA Network object.
The function ensures that all unique carrier names are present in the network's
carriers attribute, and adds nice names and colors for each carrier according
to the provided configuration dictionary.
Parameters
----------
n : pypsa.Network
A PyPSA Network object that represents an electrical power system.
config : dict
A dictionary containing configuration information, specifically the
"plotting" key with "nice_names" and "tech_colors" keys for carriers.
Returns
-------
None
The function modifies the 'n' PyPSA Network object in-place, updating the
carriers attribute with nice names and colors.
Warnings
--------
Logs a warning if any carrier's ``tech_colors`` are not defined in the config dictionary.
"""
for c in n.iterate_components():
if "carrier" in c.df:
add_missing_carriers(n, c.df)
carrier_i = n.carriers.index
nice_names = (
pd.Series(config["plotting"]["nice_names"])
.reindex(carrier_i)
.fillna(carrier_i.to_series().str.title())
)
suptechs = missing_carriers.str.split("-").str[0]
emissions = costs.loc[suptechs, emissions_cols].fillna(0.0)
emissions.index = missing_carriers
n.import_components_from_dataframe(emissions, "Carrier")
n.carriers["nice_name"] = n.carriers.nice_name.where(
n.carriers.nice_name != "", nice_names
)
colors = pd.Series(config["plotting"]["tech_colors"]).reindex(carrier_i)
if colors.isna().any():
missing_i = list(colors.index[colors.isna()])
logger.warning(f"tech_colors for carriers {missing_i} not defined in config.")
n.carriers["color"] = n.carriers.color.where(n.carriers.color != "", colors)
def load_costs(tech_costs, config, elec_config, Nyears=1.0):
def add_co2_emissions(n, costs, carriers):
"""
Add CO2 emissions to the network's carriers attribute.
"""
suptechs = n.carriers.loc[carriers].index.str.split("-").str[0]
n.carriers.loc[carriers, "co2_emissions"] = costs.co2_emissions[suptechs].values
def load_costs(tech_costs, config, max_hours, Nyears=1.0):
# set all asset costs and other parameters
costs = pd.read_csv(tech_costs, index_col=[0, 1]).sort_index()
@ -180,7 +230,6 @@ def load_costs(tech_costs, config, elec_config, Nyears=1.0):
dict(capital_cost=capital_cost, marginal_cost=0.0, co2_emissions=0.0)
)
max_hours = elec_config["max_hours"]
costs.loc["battery"] = costs_for_storage(
costs.loc["battery storage"],
costs.loc["battery inverter"],
@ -310,57 +359,56 @@ def update_transmission_costs(n, costs, length_factor=1.0):
def attach_wind_and_solar(
n, costs, input_profiles, technologies, extendable_carriers, line_length_factor=1
n, costs, input_profiles, carriers, extendable_carriers, line_length_factor=1
):
# TODO: rename tech -> carrier, technologies -> carriers
_add_missing_carriers_from_costs(n, costs, technologies)
add_missing_carriers(n, carriers)
for tech in technologies:
if tech == "hydro":
for car in carriers:
if car == "hydro":
continue
with xr.open_dataset(getattr(input_profiles, "profile_" + tech)) as ds:
with xr.open_dataset(getattr(input_profiles, "profile_" + car)) as ds:
if ds.indexes["bus"].empty:
continue
suptech = tech.split("-", 2)[0]
if suptech == "offwind":
supcar = car.split("-", 2)[0]
if supcar == "offwind":
underwater_fraction = ds["underwater_fraction"].to_pandas()
connection_cost = (
line_length_factor
* ds["average_distance"].to_pandas()
* (
underwater_fraction
* costs.at[tech + "-connection-submarine", "capital_cost"]
* costs.at[car + "-connection-submarine", "capital_cost"]
+ (1.0 - underwater_fraction)
* costs.at[tech + "-connection-underground", "capital_cost"]
* costs.at[car + "-connection-underground", "capital_cost"]
)
)
capital_cost = (
costs.at["offwind", "capital_cost"]
+ costs.at[tech + "-station", "capital_cost"]
+ costs.at[car + "-station", "capital_cost"]
+ connection_cost
)
logger.info(
"Added connection cost of {:0.0f}-{:0.0f} Eur/MW/a to {}".format(
connection_cost.min(), connection_cost.max(), tech
connection_cost.min(), connection_cost.max(), car
)
)
else:
capital_cost = costs.at[tech, "capital_cost"]
capital_cost = costs.at[car, "capital_cost"]
n.madd(
"Generator",
ds.indexes["bus"],
" " + tech,
" " + car,
bus=ds.indexes["bus"],
carrier=tech,
p_nom_extendable=tech in extendable_carriers["Generator"],
carrier=car,
p_nom_extendable=car in extendable_carriers["Generator"],
p_nom_max=ds["p_nom_max"].to_pandas(),
weight=ds["weight"].to_pandas(),
marginal_cost=costs.at[suptech, "marginal_cost"],
marginal_cost=costs.at[supcar, "marginal_cost"],
capital_cost=capital_cost,
efficiency=costs.at[suptech, "efficiency"],
efficiency=costs.at[supcar, "efficiency"],
p_max_pu=ds["profile"].transpose("time", "bus").to_pandas(),
)
@ -371,11 +419,12 @@ def attach_conventional_generators(
ppl,
conventional_carriers,
extendable_carriers,
conventional_config,
conventional_params,
conventional_inputs,
):
carriers = set(conventional_carriers) | set(extendable_carriers["Generator"])
_add_missing_carriers_from_costs(n, costs, carriers)
carriers = list(set(conventional_carriers) | set(extendable_carriers["Generator"]))
add_missing_carriers(n, carriers)
add_co2_emissions(n, costs, carriers)
ppl = (
ppl.query("carrier in @carriers")
@ -408,12 +457,12 @@ def attach_conventional_generators(
lifetime=(ppl.dateout - ppl.datein).fillna(np.inf),
)
for carrier in conventional_config:
for carrier in conventional_params:
# Generators with technology affected
idx = n.generators.query("carrier == @carrier").index
for attr in list(set(conventional_config[carrier]) & set(n.generators)):
values = conventional_config[carrier][attr]
for attr in list(set(conventional_params[carrier]) & set(n.generators)):
values = conventional_params[carrier][attr]
if f"conventional_{carrier}_{attr}" in conventional_inputs:
# Values affecting generators of technology k country-specific
@ -430,8 +479,9 @@ def attach_conventional_generators(
n.generators.loc[idx, attr] = values
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config):
_add_missing_carriers_from_costs(n, costs, carriers)
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **params):
add_missing_carriers(n, carriers)
add_co2_emissions(n, costs, carriers)
ppl = (
ppl.query('carrier == "hydro"')
@ -485,9 +535,9 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
)
if "PHS" in carriers and not phs.empty:
# fill missing max hours to config value and
# fill missing max hours to params value and
# assume no natural inflow due to lack of data
max_hours = config.get("PHS_max_hours", 6)
max_hours = params.get("PHS_max_hours", 6)
phs = phs.replace({"max_hours": {0: max_hours}})
n.madd(
"StorageUnit",
@ -503,7 +553,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **con
)
if "hydro" in carriers and not hydro.empty:
hydro_max_hours = config.get("hydro_max_hours")
hydro_max_hours = params.get("hydro_max_hours")
assert hydro_max_hours is not None, "No path for hydro capacities given."
@ -563,7 +613,8 @@ def attach_extendable_generators(n, costs, ppl, carriers):
logger.warning(
"The function `attach_extendable_generators` is deprecated in v0.5.0."
)
_add_missing_carriers_from_costs(n, costs, carriers)
add_missing_carriers(n, carriers)
add_co2_emissions(n, costs, carriers)
for tech in carriers:
if tech.startswith("OCGT"):
@ -645,7 +696,7 @@ def attach_OPSD_renewables(n, tech_map):
buses = n.buses.loc[gens.bus.unique()]
gens_per_bus = gens.groupby("bus").p_nom.count()
caps = map_country_bus(df.query("Fueltype == @fueltype"), buses)
caps = map_country_bus(df.query("Fueltype == @fueltype and lat == lat"), buses)
caps = caps.groupby(["bus"]).Capacity.sum()
caps = caps / gens_per_bus.reindex(caps.index, fill_value=1)
@ -653,16 +704,7 @@ def attach_OPSD_renewables(n, tech_map):
n.generators.p_nom_min.update(gens.bus.map(caps).dropna())
def estimate_renewable_capacities(n, config):
year = config["electricity"]["estimate_renewable_capacities"]["year"]
tech_map = config["electricity"]["estimate_renewable_capacities"][
"technology_mapping"
]
countries = config["countries"]
expansion_limit = config["electricity"]["estimate_renewable_capacities"][
"expansion_limit"
]
def estimate_renewable_capacities(n, year, tech_map, expansion_limit, countries):
if not len(countries) or not len(tech_map):
return
@ -703,21 +745,6 @@ def estimate_renewable_capacities(n, config):
)
def add_nice_carrier_names(n, config):
carrier_i = n.carriers.index
nice_names = (
pd.Series(config["plotting"]["nice_names"])
.reindex(carrier_i)
.fillna(carrier_i.to_series().str.title())
)
n.carriers["nice_name"] = nice_names
colors = pd.Series(config["plotting"]["tech_colors"]).reindex(carrier_i)
if colors.isna().any():
missing_i = list(colors.index[colors.isna()])
logger.warning(f"tech_colors for carriers {missing_i} not defined in config.")
n.carriers["color"] = colors
if __name__ == "__main__":
if "snakemake" not in globals():
from _helpers import mock_snakemake
@ -725,48 +752,33 @@ if __name__ == "__main__":
snakemake = mock_snakemake("add_electricity")
configure_logging(snakemake)
params = snakemake.params
n = pypsa.Network(snakemake.input.base_network)
Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs(
snakemake.input.tech_costs,
snakemake.config["costs"],
snakemake.config["electricity"],
params.costs,
params.electricity["max_hours"],
Nyears,
)
ppl = load_powerplants(snakemake.input.powerplants)
if "renewable_carriers" in snakemake.config["electricity"]:
renewable_carriers = set(snakemake.config["electricity"]["renewable_carriers"])
else:
logger.warning(
"Missing key `renewable_carriers` under config entry `electricity`. "
"In future versions, this will raise an error. "
"Falling back to carriers listed under `renewable`."
)
renewable_carriers = snakemake.config["renewable"]
extendable_carriers = snakemake.config["electricity"]["extendable_carriers"]
if not (set(renewable_carriers) & set(extendable_carriers["Generator"])):
logger.warning(
"No renewables found in config entry `extendable_carriers`. "
"In future versions, these have to be explicitly listed. "
"Falling back to all renewables."
)
conventional_carriers = snakemake.config["electricity"]["conventional_carriers"]
attach_load(
n,
snakemake.input.regions,
snakemake.input.load,
snakemake.input.nuts3_shapes,
snakemake.config["countries"],
snakemake.config["load"]["scaling_factor"],
params.countries,
params.scaling_factor,
)
update_transmission_costs(n, costs, snakemake.config["lines"]["length_factor"])
update_transmission_costs(n, costs, params.length_factor)
renewable_carriers = set(params.electricity["renewable_carriers"])
extendable_carriers = params.electricity["extendable_carriers"]
conventional_carriers = params.electricity["conventional_carriers"]
conventional_inputs = {
k: v for k, v in snakemake.input.items() if k.startswith("conventional_")
}
@ -776,7 +788,7 @@ if __name__ == "__main__":
ppl,
conventional_carriers,
extendable_carriers,
snakemake.config.get("conventional", {}),
params.conventional,
conventional_inputs,
)
@ -786,71 +798,36 @@ if __name__ == "__main__":
snakemake.input,
renewable_carriers,
extendable_carriers,
snakemake.config["lines"]["length_factor"],
params.length_factor,
)
if "hydro" in renewable_carriers:
conf = snakemake.config["renewable"]["hydro"]
para = params.renewable["hydro"]
attach_hydro(
n,
costs,
ppl,
snakemake.input.profile_hydro,
snakemake.input.hydro_capacities,
conf.pop("carriers", []),
**conf,
para.pop("carriers", []),
**para,
)
if "estimate_renewable_capacities" not in snakemake.config["electricity"]:
logger.warning(
"Missing key `estimate_renewable_capacities` under config entry `electricity`. "
"In future versions, this will raise an error. "
"Falling back to whether ``estimate_renewable_capacities_from_capacity_stats`` is in the config."
)
if (
"estimate_renewable_capacities_from_capacity_stats"
in snakemake.config["electricity"]
):
estimate_renewable_caps = {
"enable": True,
**snakemake.config["electricity"][
"estimate_renewable_capacities_from_capacity_stats"
],
}
else:
estimate_renewable_caps = {"enable": False}
else:
estimate_renewable_caps = snakemake.config["electricity"][
"estimate_renewable_capacities"
]
if "enable" not in estimate_renewable_caps:
logger.warning(
"Missing key `enable` under config entry `estimate_renewable_capacities`. "
"In future versions, this will raise an error. Falling back to False."
)
estimate_renewable_caps = {"enable": False}
if "from_opsd" not in estimate_renewable_caps:
logger.warning(
"Missing key `from_opsd` under config entry `estimate_renewable_capacities`. "
"In future versions, this will raise an error. "
"Falling back to whether `renewable_capacities_from_opsd` is non-empty."
)
from_opsd = bool(
snakemake.config["electricity"].get("renewable_capacities_from_opsd", False)
)
estimate_renewable_caps["from_opsd"] = from_opsd
estimate_renewable_caps = params.electricity["estimate_renewable_capacities"]
if estimate_renewable_caps["enable"]:
tech_map = estimate_renewable_caps["technology_mapping"]
expansion_limit = estimate_renewable_caps["expansion_limit"]
year = estimate_renewable_caps["year"]
if estimate_renewable_caps["from_opsd"]:
tech_map = snakemake.config["electricity"]["estimate_renewable_capacities"][
"technology_mapping"
]
attach_OPSD_renewables(n, tech_map)
estimate_renewable_capacities(n, snakemake.config)
estimate_renewable_capacities(
n, year, tech_map, expansion_limit, params.countries
)
update_p_nom_max(n)
add_nice_carrier_names(n, snakemake.config)
sanitize_carriers(n, snakemake.config)
n.meta = snakemake.config
n.export_to_netcdf(snakemake.output[0])

View File

@ -22,6 +22,7 @@ import numpy as np
import pypsa
import xarray as xr
from _helpers import override_component_attrs, update_config_with_sector_opts
from add_electricity import sanitize_carriers
from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs
cc = coco.CountryConverter()
@ -157,7 +158,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
# Fill missing DateOut
dateout = (
df_agg.loc[biomass_i, "DateIn"]
+ snakemake.config["costs"]["fill_values"]["lifetime"]
+ snakemake.params.costs["fill_values"]["lifetime"]
)
df_agg.loc[biomass_i, "DateOut"] = df_agg.loc[biomass_i, "DateOut"].fillna(dateout)
@ -218,7 +219,7 @@ def add_power_capacities_installed_before_baseyear(n, grouping_years, costs, bas
capacity = df.loc[grouping_year, generator]
capacity = capacity[~capacity.isna()]
capacity = capacity[
capacity > snakemake.config["existing_capacities"]["threshold_capacity"]
capacity > snakemake.params.existing_capacities["threshold_capacity"]
]
suffix = "-ac" if generator == "offwind" else ""
name_suffix = f" {generator}{suffix}-{grouping_year}"
@ -582,7 +583,7 @@ def add_heating_capacities_installed_before_baseyear(
)
# delete links with capacities below threshold
threshold = snakemake.config["existing_capacities"]["threshold_capacity"]
threshold = snakemake.params.existing_capacities["threshold_capacity"]
n.mremove(
"Link",
[
@ -612,10 +613,10 @@ if __name__ == "__main__":
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
options = snakemake.config["sector"]
options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-")
baseyear = snakemake.config["scenario"]["planning_horizons"][0]
baseyear = snakemake.params.baseyear
overrides = override_component_attrs(snakemake.input.overrides)
n = pypsa.Network(snakemake.input.network, override_component_attrs=overrides)
@ -626,14 +627,12 @@ if __name__ == "__main__":
Nyears = n.snapshot_weightings.generators.sum() / 8760.0
costs = prepare_costs(
snakemake.input.costs,
snakemake.config["costs"],
snakemake.params.costs,
Nyears,
)
grouping_years_power = snakemake.config["existing_capacities"][
"grouping_years_power"
]
grouping_years_heat = snakemake.config["existing_capacities"]["grouping_years_heat"]
grouping_years_power = snakemake.params.existing_capacities["grouping_years_power"]
grouping_years_heat = snakemake.params.existing_capacities["grouping_years_heat"]
add_power_capacities_installed_before_baseyear(
n, grouping_years_power, costs, baseyear
)
@ -650,7 +649,7 @@ if __name__ == "__main__":
.to_pandas()
.reindex(index=n.snapshots)
)
default_lifetime = snakemake.config["costs"]["fill_values"]["lifetime"]
default_lifetime = snakemake.params.costs["fill_values"]["lifetime"]
add_heating_capacities_installed_before_baseyear(
n,
baseyear,
@ -667,4 +666,6 @@ if __name__ == "__main__":
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
sanitize_carriers(n, snakemake.config)
n.export_to_netcdf(snakemake.output[0])

View File

@ -56,22 +56,17 @@ import numpy as np
import pandas as pd
import pypsa
from _helpers import configure_logging
from add_electricity import (
_add_missing_carriers_from_costs,
add_nice_carrier_names,
load_costs,
)
from add_electricity import load_costs, sanitize_carriers
idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def attach_storageunits(n, costs, elec_opts):
carriers = elec_opts["extendable_carriers"]["StorageUnit"]
max_hours = elec_opts["max_hours"]
def attach_storageunits(n, costs, extendable_carriers, max_hours):
carriers = extendable_carriers["StorageUnit"]
_add_missing_carriers_from_costs(n, costs, carriers)
n.madd("Carrier", carriers)
buses_i = n.buses.index
@ -99,10 +94,10 @@ def attach_storageunits(n, costs, elec_opts):
)
def attach_stores(n, costs, elec_opts):
carriers = elec_opts["extendable_carriers"]["Store"]
def attach_stores(n, costs, extendable_carriers):
carriers = extendable_carriers["Store"]
_add_missing_carriers_from_costs(n, costs, carriers)
n.madd("Carrier", carriers)
buses_i = n.buses.index
bus_sub_dict = {k: n.buses[k].values for k in ["x", "y", "country"]}
@ -162,6 +157,8 @@ def attach_stores(n, costs, elec_opts):
marginal_cost=costs.at["battery", "marginal_cost"],
)
n.madd("Carrier", ["battery charger", "battery discharger"])
n.madd(
"Link",
b_buses_i + " charger",
@ -187,11 +184,10 @@ def attach_stores(n, costs, elec_opts):
)
def attach_hydrogen_pipelines(n, costs, elec_opts):
ext_carriers = elec_opts["extendable_carriers"]
as_stores = ext_carriers.get("Store", [])
def attach_hydrogen_pipelines(n, costs, extendable_carriers):
as_stores = extendable_carriers.get("Store", [])
if "H2 pipeline" not in ext_carriers.get("Link", []):
if "H2 pipeline" not in extendable_carriers.get("Link", []):
return
assert "H2" in as_stores, (
@ -213,6 +209,8 @@ def attach_hydrogen_pipelines(n, costs, elec_opts):
h2_links.index = h2_links.apply(lambda c: f"H2 pipeline {c.bus0}-{c.bus1}", axis=1)
# add pipelines
n.add("Carrier", "H2 pipeline")
n.madd(
"Link",
h2_links.index,
@ -235,18 +233,19 @@ if __name__ == "__main__":
configure_logging(snakemake)
n = pypsa.Network(snakemake.input.network)
elec_config = snakemake.config["electricity"]
extendable_carriers = snakemake.params.extendable_carriers
max_hours = snakemake.params.max_hours
Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs(
snakemake.input.tech_costs, snakemake.config["costs"], elec_config, Nyears
snakemake.input.tech_costs, snakemake.params.costs, max_hours, Nyears
)
attach_storageunits(n, costs, elec_config)
attach_stores(n, costs, elec_config)
attach_hydrogen_pipelines(n, costs, elec_config)
attach_storageunits(n, costs, extendable_carriers, max_hours)
attach_stores(n, costs, extendable_carriers)
attach_hydrogen_pipelines(n, costs, extendable_carriers)
add_nice_carrier_names(n, snakemake.config)
sanitize_carriers(n, snakemake.config)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
n.export_to_netcdf(snakemake.output[0])

View File

@ -714,6 +714,7 @@ def base_network(
n.name = "PyPSA-Eur"
n.set_snapshots(pd.date_range(freq="h", **config["snapshots"]))
n.madd("Carrier", ["AC", "DC"])
n.import_components_from_dataframe(buses, "Bus")
n.import_components_from_dataframe(lines, "Line")

View File

@ -30,7 +30,7 @@ if __name__ == "__main__":
ammonia.index = cc.convert(ammonia.index, to="iso2")
years = [str(i) for i in range(2013, 2018)]
countries = ammonia.index.intersection(snakemake.config["countries"])
countries = ammonia.index.intersection(snakemake.params.countries)
ammonia = ammonia.loc[countries, years].astype(float)
# convert from ktonN to ktonNH3

View File

@ -210,9 +210,9 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_biomass_potentials", simpl="", clusters="5")
config = snakemake.config["biomass"]
year = config["year"]
scenario = config["scenario"]
params = snakemake.params.biomass
year = params["year"]
scenario = params["scenario"]
enspreso = enspreso_biomass_potentials(year, scenario)
@ -228,7 +228,7 @@ if __name__ == "__main__":
df.to_csv(snakemake.output.biomass_potentials_all)
grouper = {v: k for k, vv in config["classes"].items() for v in vv}
grouper = {v: k for k, vv in params["classes"].items() for v in vv}
df = df.groupby(grouper, axis=1).sum()
df *= 1e6 # TWh/a to MWh/a

View File

@ -116,7 +116,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_bus_regions")
configure_logging(snakemake)
countries = snakemake.config["countries"]
countries = snakemake.params.countries
n = pypsa.Network(snakemake.input.base_network)

View File

@ -39,7 +39,7 @@ if __name__ == "__main__":
for source in ["air", "soil"]:
source_T = xr.open_dataarray(snakemake.input[f"temp_{source}_{area}"])
delta_T = snakemake.config["sector"]["heat_pump_sink_T"] - source_T
delta_T = snakemake.params.heat_pump_sink_T - source_T
cop = coefficient_of_performance(delta_T, source)

View File

@ -106,9 +106,9 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_cutout", cutout="europe-2013-era5")
configure_logging(snakemake)
cutout_params = snakemake.config["atlite"]["cutouts"][snakemake.wildcards.cutout]
cutout_params = snakemake.params.cutouts[snakemake.wildcards.cutout]
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"])
snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
time = [snapshots[0], snapshots[-1]]
cutout_params["time"] = slice(*cutout_params.get("time", time))

View File

@ -279,16 +279,16 @@ if __name__ == "__main__":
configure_logging(snakemake)
powerstatistics = snakemake.config["load"]["power_statistics"]
interpolate_limit = snakemake.config["load"]["interpolate_limit"]
countries = snakemake.config["countries"]
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"])
powerstatistics = snakemake.params.load["power_statistics"]
interpolate_limit = snakemake.params.load["interpolate_limit"]
countries = snakemake.params.countries
snapshots = pd.date_range(freq="h", **snakemake.params.snapshots)
years = slice(snapshots[0], snapshots[-1])
time_shift = snakemake.config["load"]["time_shift_for_large_gaps"]
time_shift = snakemake.params.load["time_shift_for_large_gaps"]
load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
if snakemake.config["load"]["manual_adjustments"]:
if snakemake.params.load["manual_adjustments"]:
load = manual_adjustment(load, snakemake.input[0], powerstatistics)
logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")

View File

@ -737,16 +737,16 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"])
config = snakemake.config["energy"]
params = snakemake.params.energy
nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index")
population = nuts3["pop"].groupby(nuts3.country).sum()
countries = snakemake.config["countries"]
countries = snakemake.params.countries
idees_countries = pd.Index(countries).intersection(eu28)
data_year = config["energy_totals_year"]
report_year = snakemake.config["energy"]["eurostat_report_year"]
data_year = params["energy_totals_year"]
report_year = snakemake.params.energy["eurostat_report_year"]
input_eurostat = snakemake.input.eurostat
eurostat = build_eurostat(input_eurostat, countries, report_year, data_year)
swiss = build_swiss(data_year)
@ -755,8 +755,8 @@ if __name__ == "__main__":
energy = build_energy_totals(countries, eurostat, swiss, idees)
energy.to_csv(snakemake.output.energy_name)
base_year_emissions = config["base_emissions_year"]
emissions_scope = snakemake.config["energy"]["emissions"]
base_year_emissions = params["base_emissions_year"]
emissions_scope = snakemake.params.energy["emissions"]
eea_co2 = build_eea_co2(snakemake.input.co2, base_year_emissions, emissions_scope)
eurostat_co2 = build_eurostat_co2(
input_eurostat, countries, report_year, base_year_emissions

View File

@ -27,7 +27,7 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)
time = pd.date_range(freq="h", **snakemake.config["snapshots"])
time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = (

View File

@ -130,10 +130,10 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_hydro_profile")
configure_logging(snakemake)
config_hydro = snakemake.config["renewable"]["hydro"]
params_hydro = snakemake.params.hydro
cutout = atlite.Cutout(snakemake.input.cutout)
countries = snakemake.config["countries"]
countries = snakemake.params.countries
country_shapes = (
gpd.read_file(snakemake.input.country_shapes)
.set_index("name")["geometry"]
@ -151,7 +151,7 @@ if __name__ == "__main__":
normalize_using_yearly=eia_stats,
)
if "clip_min_inflow" in config_hydro:
inflow = inflow.where(inflow > config_hydro["clip_min_inflow"], 0)
if "clip_min_inflow" in params_hydro:
inflow = inflow.where(inflow > params_hydro["clip_min_inflow"], 0)
inflow.to_netcdf(snakemake.output[0])

View File

@ -73,7 +73,7 @@ def prepare_hotmaps_database(regions):
df[["srid", "coordinates"]] = df.geom.str.split(";", expand=True)
if snakemake.config["industry"].get("hotmaps_locate_missing", False):
if snakemake.params.hotmaps_locate_missing:
df = locate_missing_industrial_sites(df)
# remove those sites without valid locations
@ -143,7 +143,7 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"])
countries = snakemake.config["countries"]
countries = snakemake.params.countries
regions = gpd.read_file(snakemake.input.regions_onshore).set_index("name")

View File

@ -101,8 +101,8 @@ def add_ammonia_energy_demand(demand):
def get_ammonia_by_fuel(x):
fuels = {
"gas": config["MWh_CH4_per_tNH3_SMR"],
"electricity": config["MWh_elec_per_tNH3_SMR"],
"gas": params["MWh_CH4_per_tNH3_SMR"],
"electricity": params["MWh_elec_per_tNH3_SMR"],
}
return pd.Series({k: x * v for k, v in fuels.items()})
@ -112,7 +112,7 @@ def add_ammonia_energy_demand(demand):
index=demand.index, fill_value=0.0
)
ammonia = pd.DataFrame({"ammonia": ammonia * config["MWh_NH3_per_tNH3"]}).T
ammonia = pd.DataFrame({"ammonia": ammonia * params["MWh_NH3_per_tNH3"]}).T
demand["Ammonia"] = ammonia.unstack().reindex(index=demand.index, fill_value=0.0)
@ -178,9 +178,9 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_industrial_energy_demand_per_country_today")
config = snakemake.config["industry"]
year = config.get("reference_year", 2015)
countries = pd.Index(snakemake.config["countries"])
params = snakemake.params.industry
year = params.get("reference_year", 2015)
countries = pd.Index(snakemake.params.countries)
demand = industrial_energy_demand(countries.intersection(eu28), year)

View File

@ -264,9 +264,9 @@ def separate_basic_chemicals(demand, year):
# assume HVC, methanol, chlorine production proportional to non-ammonia basic chemicals
distribution_key = demand["Basic chemicals"] / demand["Basic chemicals"].sum()
demand["HVC"] = config["HVC_production_today"] * 1e3 * distribution_key
demand["Chlorine"] = config["chlorine_production_today"] * 1e3 * distribution_key
demand["Methanol"] = config["methanol_production_today"] * 1e3 * distribution_key
demand["HVC"] = params["HVC_production_today"] * 1e3 * distribution_key
demand["Chlorine"] = params["chlorine_production_today"] * 1e3 * distribution_key
demand["Methanol"] = params["methanol_production_today"] * 1e3 * distribution_key
demand.drop(columns=["Basic chemicals"], inplace=True)
@ -279,11 +279,11 @@ if __name__ == "__main__":
logging.basicConfig(level=snakemake.config["logging"]["level"])
countries = snakemake.config["countries"]
countries = snakemake.params.countries
year = snakemake.config["industry"]["reference_year"]
year = snakemake.params.industry["reference_year"]
config = snakemake.config["industry"]
params = snakemake.params.industry
jrc_dir = snakemake.input.jrc
eurostat_dir = snakemake.input.eurostat

View File

@ -15,7 +15,7 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_industrial_production_per_country_tomorrow")
config = snakemake.config["industry"]
params = snakemake.params.industry
investment_year = int(snakemake.wildcards.planning_horizons)
@ -25,8 +25,8 @@ if __name__ == "__main__":
keys = ["Integrated steelworks", "Electric arc"]
total_steel = production[keys].sum(axis=1)
st_primary_fraction = get(config["St_primary_fraction"], investment_year)
dri_fraction = get(config["DRI_fraction"], investment_year)
st_primary_fraction = get(params["St_primary_fraction"], investment_year)
dri_fraction = get(params["DRI_fraction"], investment_year)
int_steel = production["Integrated steelworks"].sum()
fraction_persistent_primary = st_primary_fraction * total_steel.sum() / int_steel
@ -51,7 +51,7 @@ if __name__ == "__main__":
key_pri = "Aluminium - primary production"
key_sec = "Aluminium - secondary production"
al_primary_fraction = get(config["Al_primary_fraction"], investment_year)
al_primary_fraction = get(params["Al_primary_fraction"], investment_year)
fraction_persistent_primary = (
al_primary_fraction * total_aluminium.sum() / production[key_pri].sum()
)
@ -60,15 +60,15 @@ if __name__ == "__main__":
production[key_sec] = total_aluminium - production[key_pri]
production["HVC (mechanical recycling)"] = (
get(config["HVC_mechanical_recycling_fraction"], investment_year)
get(params["HVC_mechanical_recycling_fraction"], investment_year)
* production["HVC"]
)
production["HVC (chemical recycling)"] = (
get(config["HVC_chemical_recycling_fraction"], investment_year)
get(params["HVC_chemical_recycling_fraction"], investment_year)
* production["HVC"]
)
production["HVC"] *= get(config["HVC_primary_fraction"], investment_year)
production["HVC"] *= get(params["HVC_primary_fraction"], investment_year)
fn = snakemake.output.industrial_production_per_country_tomorrow
production.to_csv(fn, float_format="%.2f")

View File

@ -185,10 +185,10 @@ def iron_and_steel():
df[sector] = df["Electric arc"]
# add H2 consumption for DRI at 1.7 MWh H2 /ton steel
df.at["hydrogen", sector] = config["H2_DRI"]
df.at["hydrogen", sector] = params["H2_DRI"]
# add electricity consumption in DRI shaft (0.322 MWh/tSl)
df.at["elec", sector] += config["elec_DRI"]
df.at["elec", sector] += params["elec_DRI"]
## Integrated steelworks
# could be used in combination with CCS)
@ -383,19 +383,19 @@ def chemicals_industry():
assert s_emi.index[0] == sector
# convert from MtHVC/a to ktHVC/a
s_out = config["HVC_production_today"] * 1e3
s_out = params["HVC_production_today"] * 1e3
# tCO2/t material
df.loc["process emission", sector] += (
s_emi["Process emissions"]
- config["petrochemical_process_emissions"] * 1e3
- config["NH3_process_emissions"] * 1e3
- params["petrochemical_process_emissions"] * 1e3
- params["NH3_process_emissions"] * 1e3
) / s_out
# emissions originating from feedstock, could be non-fossil origin
# tCO2/t material
df.loc["process emission from feedstock", sector] += (
config["petrochemical_process_emissions"] * 1e3
params["petrochemical_process_emissions"] * 1e3
) / s_out
# convert from ktoe/a to GWh/a
@ -405,18 +405,18 @@ def chemicals_industry():
# subtract ammonia energy demand (in ktNH3/a)
ammonia = pd.read_csv(snakemake.input.ammonia_production, index_col=0)
ammonia_total = ammonia.loc[ammonia.index.intersection(eu28), str(year)].sum()
df.loc["methane", sector] -= ammonia_total * config["MWh_CH4_per_tNH3_SMR"]
df.loc["elec", sector] -= ammonia_total * config["MWh_elec_per_tNH3_SMR"]
df.loc["methane", sector] -= ammonia_total * params["MWh_CH4_per_tNH3_SMR"]
df.loc["elec", sector] -= ammonia_total * params["MWh_elec_per_tNH3_SMR"]
# subtract chlorine demand
chlorine_total = config["chlorine_production_today"]
df.loc["hydrogen", sector] -= chlorine_total * config["MWh_H2_per_tCl"]
df.loc["elec", sector] -= chlorine_total * config["MWh_elec_per_tCl"]
chlorine_total = params["chlorine_production_today"]
df.loc["hydrogen", sector] -= chlorine_total * params["MWh_H2_per_tCl"]
df.loc["elec", sector] -= chlorine_total * params["MWh_elec_per_tCl"]
# subtract methanol demand
methanol_total = config["methanol_production_today"]
df.loc["methane", sector] -= methanol_total * config["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] -= methanol_total * config["MWh_elec_per_tMeOH"]
methanol_total = params["methanol_production_today"]
df.loc["methane", sector] -= methanol_total * params["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] -= methanol_total * params["MWh_elec_per_tMeOH"]
# MWh/t material
df.loc[sources, sector] = df.loc[sources, sector] / s_out
@ -427,37 +427,37 @@ def chemicals_industry():
sector = "HVC (mechanical recycling)"
df[sector] = 0.0
df.loc["elec", sector] = config["MWh_elec_per_tHVC_mechanical_recycling"]
df.loc["elec", sector] = params["MWh_elec_per_tHVC_mechanical_recycling"]
# HVC chemical recycling
sector = "HVC (chemical recycling)"
df[sector] = 0.0
df.loc["elec", sector] = config["MWh_elec_per_tHVC_chemical_recycling"]
df.loc["elec", sector] = params["MWh_elec_per_tHVC_chemical_recycling"]
# Ammonia
sector = "Ammonia"
df[sector] = 0.0
if snakemake.config["sector"].get("ammonia", False):
df.loc["ammonia", sector] = config["MWh_NH3_per_tNH3"]
if snakemake.params.ammonia:
df.loc["ammonia", sector] = params["MWh_NH3_per_tNH3"]
else:
df.loc["hydrogen", sector] = config["MWh_H2_per_tNH3_electrolysis"]
df.loc["elec", sector] = config["MWh_elec_per_tNH3_electrolysis"]
df.loc["hydrogen", sector] = params["MWh_H2_per_tNH3_electrolysis"]
df.loc["elec", sector] = params["MWh_elec_per_tNH3_electrolysis"]
# Chlorine
sector = "Chlorine"
df[sector] = 0.0
df.loc["hydrogen", sector] = config["MWh_H2_per_tCl"]
df.loc["elec", sector] = config["MWh_elec_per_tCl"]
df.loc["hydrogen", sector] = params["MWh_H2_per_tCl"]
df.loc["elec", sector] = params["MWh_elec_per_tCl"]
# Methanol
sector = "Methanol"
df[sector] = 0.0
df.loc["methane", sector] = config["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] = config["MWh_elec_per_tMeOH"]
df.loc["methane", sector] = params["MWh_CH4_per_tMeOH"]
df.loc["elec", sector] = params["MWh_elec_per_tMeOH"]
# Other chemicals
@ -1465,10 +1465,10 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_industry_sector_ratios")
# TODO make config option
# TODO make params option
year = 2015
config = snakemake.config["industry"]
params = snakemake.params.industry
df = pd.concat(
[

View File

@ -115,7 +115,7 @@ if __name__ == "__main__":
configure_logging(snakemake)
n = pypsa.Network(snakemake.input.base_network)
countries = snakemake.config["countries"]
countries = snakemake.params.countries
ppl = (
pm.powerplants(from_url=True)
@ -134,12 +134,12 @@ if __name__ == "__main__":
ppl = ppl.query('not (Country in @available_countries and Fueltype == "Bioenergy")')
ppl = pd.concat([ppl, opsd])
ppl_query = snakemake.config["electricity"]["powerplants_filter"]
ppl_query = snakemake.params.powerplants_filter
if isinstance(ppl_query, str):
ppl.query(ppl_query, inplace=True)
# add carriers from own powerplant files:
custom_ppl_query = snakemake.config["electricity"]["custom_powerplants"]
custom_ppl_query = snakemake.params.custom_powerplants
ppl = add_custom_powerplants(
ppl, snakemake.input.custom_powerplants, custom_ppl_query
)
@ -149,6 +149,7 @@ if __name__ == "__main__":
logging.warning(f"No powerplants known in: {', '.join(countries_wo_ppl)}")
substations = n.buses.query("substation_lv")
ppl = ppl.dropna(subset=["lat", "lon"])
ppl = map_country_bus(ppl, substations)
bus_null_b = ppl["bus"].isnull()
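Note (illustrative): the recurring change in this commit is that scripts read their settings from snakemake.params instead of digging into snakemake.config. A hypothetical Snakemake rule sketch showing how such params can be wired up from the config so that the attribute access above works; rule name, paths and keys are illustrative only:

rule build_powerplants:
    params:
        powerplants_filter=config["electricity"]["powerplants_filter"],
        custom_powerplants=config["electricity"]["custom_powerplants"],
        countries=config["countries"],
    input:
        base_network="networks/base.nc",
        custom_powerplants="data/custom_powerplants.csv",
    output:
        "resources/powerplants.csv",
    script:
        "scripts/build_powerplants.py"

Inside the script, snakemake.params.countries then returns the configured country list without the script needing access to the full config.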

View File

@ -64,7 +64,7 @@ Inputs
- ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions`
- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions`
- ``"cutouts/" + config["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout`
- ``networks/base.nc``: :ref:`base`
Outputs
@ -188,7 +188,7 @@ import geopandas as gpd
import numpy as np
import xarray as xr
from _helpers import configure_logging
from dask.distributed import Client, LocalCluster
from dask.distributed import Client
from pypsa.geo import haversine
from shapely.geometry import LineString
@ -204,20 +204,23 @@ if __name__ == "__main__":
nprocesses = int(snakemake.threads)
noprogress = snakemake.config["run"].get("disable_progressbar", True)
config = snakemake.config["renewable"][snakemake.wildcards.technology]
resource = config["resource"] # pv panel config / wind turbine config
correction_factor = config.get("correction_factor", 1.0)
capacity_per_sqkm = config["capacity_per_sqkm"]
p_nom_max_meth = config.get("potential", "conservative")
noprogress = noprogress or not snakemake.config["atlite"]["show_progress"]
params = snakemake.params.renewable[snakemake.wildcards.technology]
resource = params["resource"] # pv panel params / wind turbine params
correction_factor = params.get("correction_factor", 1.0)
capacity_per_sqkm = params["capacity_per_sqkm"]
p_nom_max_meth = params.get("potential", "conservative")
if isinstance(config.get("corine", {}), list):
config["corine"] = {"grid_codes": config["corine"]}
if isinstance(params.get("corine", {}), list):
params["corine"] = {"grid_codes": params["corine"]}
if correction_factor != 1.0:
logger.info(f"correction_factor is set as {correction_factor}")
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)
if nprocesses > 1:
client = Client(n_workers=nprocesses, threads_per_worker=1)
else:
client = None
cutout = atlite.Cutout(snakemake.input.cutout)
regions = gpd.read_file(snakemake.input.regions)
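Note (illustrative): passing n_workers and threads_per_worker directly to dask.distributed.Client starts an implicit LocalCluster, which is why the explicit LocalCluster import is dropped above; with a single process no distributed client is created at all. A minimal sketch, values hypothetical:

from dask.distributed import Client

nprocesses = 4  # e.g. snakemake.threads, hypothetical
client = Client(n_workers=nprocesses, threads_per_worker=1) if nprocesses > 1 else None
dask_kwargs = {"scheduler": client} if client is not None else {}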
@ -229,13 +232,13 @@ if __name__ == "__main__":
regions = regions.set_index("name").rename_axis("bus")
buses = regions.index
res = config.get("excluder_resolution", 100)
res = params.get("excluder_resolution", 100)
excluder = atlite.ExclusionContainer(crs=3035, res=res)
if config["natura"]:
if params["natura"]:
excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
corine = config.get("corine", {})
corine = params.get("corine", {})
if "grid_codes" in corine:
codes = corine["grid_codes"]
excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
@ -246,28 +249,28 @@ if __name__ == "__main__":
snakemake.input.corine, codes=codes, buffer=buffer, crs=3035
)
if "ship_threshold" in config:
if "ship_threshold" in params:
shipping_threshold = (
config["ship_threshold"] * 8760 * 6
params["ship_threshold"] * 8760 * 6
) # approximation based on 6 years of hourly collected data
func = functools.partial(np.less, shipping_threshold)
excluder.add_raster(
snakemake.input.ship_density, codes=func, crs=4326, allow_no_overlap=True
)
if config.get("max_depth"):
if params.get("max_depth"):
# lambda not supported for atlite + multiprocessing
# use named function np.greater with partially frozen argument instead
# and exclude areas where: -max_depth > grid cell depth
func = functools.partial(np.greater, -config["max_depth"])
func = functools.partial(np.greater, -params["max_depth"])
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)
if "min_shore_distance" in config:
buffer = config["min_shore_distance"]
if "min_shore_distance" in params:
buffer = params["min_shore_distance"]
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)
if "max_shore_distance" in config:
buffer = config["max_shore_distance"]
if "max_shore_distance" in params:
buffer = params["max_shore_distance"]
excluder.add_geometry(
snakemake.input.country_shapes, buffer=buffer, invert=True
)
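Note (illustrative): the max_depth exclusion above freezes one argument of a named NumPy ufunc with functools.partial because lambdas cannot be pickled for multiprocessing. A minimal sketch with a hypothetical depth limit:

import functools
import numpy as np

max_depth = 50.0  # metres, hypothetical
func = functools.partial(np.greater, -max_depth)
func(-120.0)  # True: the cell is deeper than 50 m and gets excluded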
@ -289,7 +292,8 @@ if __name__ == "__main__":
potential = capacity_per_sqkm * availability.sum("bus") * area
func = getattr(cutout, resource.pop("method"))
resource["dask_kwargs"] = {"scheduler": client}
if client is not None:
resource["dask_kwargs"] = {"scheduler": client}
capacity_factor = correction_factor * func(capacity_factor=True, **resource)
layout = capacity_factor * area * capacity_per_sqkm
profile, capacities = func(
@ -358,13 +362,13 @@ if __name__ == "__main__":
# select only buses with some capacity and minimal capacity factor
ds = ds.sel(
bus=(
(ds["profile"].mean("time") > config.get("min_p_max_pu", 0.0))
& (ds["p_nom_max"] > config.get("min_p_nom_max", 0.0))
(ds["profile"].mean("time") > params.get("min_p_max_pu", 0.0))
& (ds["p_nom_max"] > params.get("min_p_nom_max", 0.0))
)
)
if "clip_p_max_pu" in config:
min_p_max_pu = config["clip_p_max_pu"]
if "clip_p_max_pu" in params:
min_p_max_pu = params["clip_p_max_pu"]
ds["profile"] = ds["profile"].where(ds["profile"] >= min_p_max_pu, 0)
ds.to_netcdf(snakemake.output.profile)

View File

@ -305,7 +305,7 @@ def prepare_building_stock_data():
u_values.set_index(["country_code", "subsector", "bage", "type"], inplace=True)
# only take countries specified in config.yaml into account
countries = snakemake.config["countries"]
countries = snakemake.params.countries
area_tot = area_tot.loc[countries]
return u_values, country_iso_dic, countries, area_tot, area
@ -1040,7 +1040,7 @@ if __name__ == "__main__":
# ******** config *********************************************************
retro_opts = snakemake.config["sector"]["retrofitting"]
retro_opts = snakemake.params.retrofitting
interest_rate = retro_opts["interest_rate"]
annualise_cost = retro_opts["annualise_cost"] # annualise the investment costs
tax_weighting = retro_opts[

View File

@ -41,7 +41,7 @@ if __name__ == "__main__":
"build_sequestration_potentials", simpl="", clusters="181"
)
cf = snakemake.config["sector"]["regional_co2_sequestration_potential"]
cf = snakemake.params.sequestration_potential
gdf = gpd.read_file(snakemake.input.sequestration_potential[0])

View File

@ -255,13 +255,11 @@ if __name__ == "__main__":
snakemake = mock_snakemake("build_shapes")
configure_logging(snakemake)
country_shapes = countries(
snakemake.input.naturalearth, snakemake.config["countries"]
)
country_shapes = countries(snakemake.input.naturalearth, snakemake.params.countries)
country_shapes.reset_index().to_file(snakemake.output.country_shapes)
offshore_shapes = eez(
country_shapes, snakemake.input.eez, snakemake.config["countries"]
country_shapes, snakemake.input.eez, snakemake.params.countries
)
offshore_shapes.reset_index().to_file(snakemake.output.offshore_shapes)

View File

@ -27,9 +27,9 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)
config = snakemake.config["solar_thermal"]
config = snakemake.params.solar_thermal
time = pd.date_range(freq="h", **snakemake.config["snapshots"])
time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = (

View File

@ -27,7 +27,7 @@ if __name__ == "__main__":
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
client = Client(cluster, asynchronous=True)
time = pd.date_range(freq="h", **snakemake.config["snapshots"])
time = pd.date_range(freq="h", **snakemake.params.snapshots)
cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time)
clustered_regions = (

View File

@ -175,9 +175,9 @@ if __name__ == "__main__":
snakemake.input.pop_weighted_energy_totals, index_col=0
)
options = snakemake.config["sector"]
options = snakemake.params.sector
snapshots = pd.date_range(freq="h", **snakemake.config["snapshots"], tz="UTC")
snapshots = pd.date_range(freq="h", **snakemake.params.snapshots, tz="UTC")
nyears = len(snapshots) / 8760
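Note (illustrative): the snapshots param is a mapping that pandas.date_range unpacks directly; a full non-leap year of hourly snapshots gives nyears = 1. Sketch with hypothetical bounds:

import pandas as pd

snapshots = {"start": "2013-01-01", "end": "2014-01-01", "inclusive": "left"}
snapshots_index = pd.date_range(freq="h", **snapshots, tz="UTC")
nyears = len(snapshots_index) / 8760  # -> 1.0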

View File

@ -89,7 +89,7 @@ Description
**Is it possible to run the model without the** ``simplify_network`` **rule?**
No, the network clustering methods in the PyPSA module
`pypsa.networkclustering <https://github.com/PyPSA/PyPSA/blob/master/pypsa/networkclustering.py>`_
`pypsa.clustering.spatial <https://github.com/PyPSA/PyPSA/blob/master/pypsa/clustering/spatial.py>`_
do not work reliably with multiple voltage levels and transformers.
.. tip::
@ -134,7 +134,7 @@ import pyomo.environ as po
import pypsa
import seaborn as sns
from _helpers import configure_logging, get_aggregation_strategies, update_p_nom_max
from pypsa.networkclustering import (
from pypsa.clustering.spatial import (
busmap_by_greedy_modularity,
busmap_by_hac,
busmap_by_kmeans,
@ -186,7 +186,7 @@ def get_feature_for_hac(n, buses_i=None, feature=None):
if "offwind" in carriers:
carriers.remove("offwind")
carriers = np.append(
carriers, network.generators.carrier.filter(like="offwind").unique()
carriers, n.generators.carrier.filter(like="offwind").unique()
)
if feature.split("-")[1] == "cap":
@ -463,28 +463,18 @@ if __name__ == "__main__":
snakemake = mock_snakemake("cluster_network", simpl="", clusters="5")
configure_logging(snakemake)
params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"]
n = pypsa.Network(snakemake.input.network)
focus_weights = snakemake.config.get("focus_weights", None)
renewable_carriers = pd.Index(
[
tech
for tech in n.generators.carrier.unique()
if tech in snakemake.config["renewable"]
]
)
exclude_carriers = snakemake.config["clustering"]["cluster_network"].get(
"exclude_carriers", []
)
exclude_carriers = params.cluster_network["exclude_carriers"]
aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
if snakemake.wildcards.clusters.endswith("m"):
n_clusters = int(snakemake.wildcards.clusters[:-1])
conventional = set(
snakemake.config["electricity"].get("conventional_carriers", [])
aggregate_carriers = set(params.conventional_carriers).intersection(
aggregate_carriers
)
aggregate_carriers = conventional.intersection(aggregate_carriers)
elif snakemake.wildcards.clusters == "all":
n_clusters = len(n.buses)
else:
@ -494,17 +484,16 @@ if __name__ == "__main__":
# Fast-path if no clustering is necessary
busmap = n.buses.index.to_series()
linemap = n.lines.index.to_series()
clustering = pypsa.networkclustering.Clustering(
clustering = pypsa.clustering.spatial.Clustering(
n, busmap, linemap, linemap, pd.Series(dtype="O")
)
else:
line_length_factor = snakemake.config["lines"]["length_factor"]
Nyears = n.snapshot_weightings.objective.sum() / 8760
hvac_overhead_cost = load_costs(
snakemake.input.tech_costs,
snakemake.config["costs"],
snakemake.config["electricity"],
params.costs,
params.max_hours,
Nyears,
).at["HVAC overhead", "capital_cost"]
@ -515,16 +504,16 @@ if __name__ == "__main__":
).all() or x.isnull().all(), "The `potential` configuration option must agree for all renewable carriers, for now!"
return v
aggregation_strategies = snakemake.config["clustering"].get(
"aggregation_strategies", {}
)
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}
custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
custom_busmap = params.custom_busmap
if custom_busmap:
custom_busmap = pd.read_csv(
snakemake.input.custom_busmap, index_col=0, squeeze=True
@ -532,21 +521,18 @@ if __name__ == "__main__":
custom_busmap.index = custom_busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
cluster_config = snakemake.config.get("clustering", {}).get(
"cluster_network", {}
)
clustering = clustering_for_n_clusters(
n,
n_clusters,
custom_busmap,
aggregate_carriers,
line_length_factor,
aggregation_strategies,
snakemake.config["solving"]["solver"]["name"],
cluster_config.get("algorithm", "hac"),
cluster_config.get("feature", "solar+onwind-time"),
params.length_factor,
params.aggregation_strategies,
solver_name,
params.cluster_network["algorithm"],
params.cluster_network["feature"],
hvac_overhead_cost,
focus_weights,
params.focus_weights,
)
update_p_nom_max(clustering.network)
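Note (illustrative): the aggregation_strategies translation above turns string entries from the config into pandas Series methods. A minimal sketch assuming a config shape like {"generators": {"p_nom_max": "sum"}}:

import pandas as pd

aggregation_strategies = {"generators": {"p_nom_max": "sum", "efficiency": "mean"}}
aggregation_strategies = {
    comp: {attr: getattr(pd.Series, func) for attr, func in strategies.items()}
    for comp, strategies in aggregation_strategies.items()
}
# getattr(pd.Series, "sum") is the unbound Series.sum method, later applied to each
# group of component attributes during clustering.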

View File

@ -198,7 +198,7 @@ def calculate_costs(n, label, costs):
def calculate_cumulative_cost():
planning_horizons = snakemake.config["scenario"]["planning_horizons"]
planning_horizons = snakemake.params.scenario["planning_horizons"]
cumulative_cost = pd.DataFrame(
index=df["costs"].sum().index,
@ -688,19 +688,19 @@ if __name__ == "__main__":
(cluster, ll, opt + sector_opt, planning_horizon): "results/"
+ snakemake.params.RDIR
+ f"/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{planning_horizon}.nc"
for simpl in snakemake.config["scenario"]["simpl"]
for cluster in snakemake.config["scenario"]["clusters"]
for opt in snakemake.config["scenario"]["opts"]
for sector_opt in snakemake.config["scenario"]["sector_opts"]
for ll in snakemake.config["scenario"]["ll"]
for planning_horizon in snakemake.config["scenario"]["planning_horizons"]
for simpl in snakemake.params.scenario["simpl"]
for cluster in snakemake.params.scenario["clusters"]
for opt in snakemake.params.scenario["opts"]
for sector_opt in snakemake.params.scenario["sector_opts"]
for ll in snakemake.params.scenario["ll"]
for planning_horizon in snakemake.params.scenario["planning_horizons"]
}
Nyears = len(pd.date_range(freq="h", **snakemake.config["snapshots"])) / 8760
Nyears = len(pd.date_range(freq="h", **snakemake.params.snapshots)) / 8760
costs_db = prepare_costs(
snakemake.input.costs,
snakemake.config["costs"],
snakemake.params.costs,
Nyears,
)
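Note (illustrative): the comprehension above expands the scenario lists into one postnetwork path per combination of wildcards. A sketch with hypothetical scenario values:

scenario = {"simpl": [""], "clusters": [181], "opts": [""], "sector_opts": ["Co2L0-73sn"],
            "ll": ["v1.5"], "planning_horizons": [2050]}
networks_dict = {
    (cluster, ll, opt + sector_opt, horizon):
        f"results/run/postnetworks/elec_s{simpl}_{cluster}_l{ll}_{opt}_{sector_opt}_{horizon}.nc"
    for simpl in scenario["simpl"]
    for cluster in scenario["clusters"]
    for opt in scenario["opts"]
    for sector_opt in scenario["sector_opts"]
    for ll in scenario["ll"]
    for horizon in scenario["planning_horizons"]
}
# -> {(181, "v1.5", "Co2L0-73sn", 2050):
#     "results/run/postnetworks/elec_s_181_lv1.5__Co2L0-73sn_2050.nc"}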
@ -710,7 +710,7 @@ if __name__ == "__main__":
to_csv(df)
if snakemake.config["foresight"] == "myopic":
if snakemake.params.foresight == "myopic":
cumulative_cost = calculate_cumulative_cost()
cumulative_cost.to_csv(
"results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv"

View File

@ -70,7 +70,7 @@ def plot_map(
transmission=False,
with_legend=True,
):
tech_colors = snakemake.config["plotting"]["tech_colors"]
tech_colors = snakemake.params.plotting["tech_colors"]
n = network.copy()
assign_location(n)
@ -116,9 +116,7 @@ def plot_map(
costs = costs.stack() # .sort_index()
# hack because impossible to drop buses...
eu_location = snakemake.config["plotting"].get(
"eu_node_location", dict(x=-5.5, y=46)
)
eu_location = snakemake.params.plotting.get("eu_node_location", dict(x=-5.5, y=46))
n.buses.loc["EU gas", "x"] = eu_location["x"]
n.buses.loc["EU gas", "y"] = eu_location["y"]
@ -315,7 +313,7 @@ def plot_h2_map(network, regions):
h2_new = n.links[n.links.carrier == "H2 pipeline"]
h2_retro = n.links[n.links.carrier == "H2 pipeline retrofitted"]
if snakemake.config["foresight"] == "myopic":
if snakemake.params.foresight == "myopic":
# sum capacity for pipelines from different investment periods
h2_new = group_pipes(h2_new)
@ -558,7 +556,7 @@ def plot_ch4_map(network):
link_widths_used = max_usage / linewidth_factor
link_widths_used[max_usage < line_lower_threshold] = 0.0
tech_colors = snakemake.config["plotting"]["tech_colors"]
tech_colors = snakemake.params.plotting["tech_colors"]
pipe_colors = {
"gas pipeline": "#f08080",
@ -700,7 +698,7 @@ def plot_map_without(network):
# hack because impossible to drop buses...
if "EU gas" in n.buses.index:
eu_location = snakemake.config["plotting"].get(
eu_location = snakemake.params.plotting.get(
"eu_node_location", dict(x=-5.5, y=46)
)
n.buses.loc["EU gas", "x"] = eu_location["x"]
@ -876,7 +874,7 @@ def plot_series(network, carrier="AC", name="test"):
stacked=True,
linewidth=0.0,
color=[
snakemake.config["plotting"]["tech_colors"][i.replace(suffix, "")]
snakemake.params.plotting["tech_colors"][i.replace(suffix, "")]
for i in new_columns
],
)
@ -937,7 +935,7 @@ if __name__ == "__main__":
regions = gpd.read_file(snakemake.input.regions).set_index("name")
map_opts = snakemake.config["plotting"]["map"]
map_opts = snakemake.params.plotting["map"]
if map_opts["boundaries"] is None:
map_opts["boundaries"] = regions.total_bounds[[0, 2, 1, 3]] + [-1, 1, -1, 1]

View File

@ -142,10 +142,10 @@ def plot_costs():
df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[df.max(axis=1) < snakemake.config["plotting"]["costs_threshold"]]
to_drop = df.index[df.max(axis=1) < snakemake.params.plotting["costs_threshold"]]
logger.info(
f"Dropping technology with costs below {snakemake.config['plotting']['costs_threshold']} EUR billion per year"
f"Dropping technology with costs below {snakemake.params['plotting']['costs_threshold']} EUR billion per year"
)
logger.debug(df.loc[to_drop])
@ -165,7 +165,7 @@ def plot_costs():
kind="bar",
ax=ax,
stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index],
color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
)
handles, labels = ax.get_legend_handles_labels()
@ -173,7 +173,7 @@ def plot_costs():
handles.reverse()
labels.reverse()
ax.set_ylim([0, snakemake.config["plotting"]["costs_max"]])
ax.set_ylim([0, snakemake.params.plotting["costs_max"]])
ax.set_ylabel("System Cost [EUR billion per year]")
@ -201,11 +201,11 @@ def plot_energy():
df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[
df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"]
df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"]
]
logger.info(
f"Dropping all technology with energy consumption or production below {snakemake.config['plotting']['energy_threshold']} TWh/a"
f"Dropping all technology with energy consumption or production below {snakemake.params['plotting']['energy_threshold']} TWh/a"
)
logger.debug(df.loc[to_drop])
@ -227,7 +227,7 @@ def plot_energy():
kind="bar",
ax=ax,
stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index],
color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
)
handles, labels = ax.get_legend_handles_labels()
@ -237,8 +237,8 @@ def plot_energy():
ax.set_ylim(
[
snakemake.config["plotting"]["energy_min"],
snakemake.config["plotting"]["energy_max"],
snakemake.params.plotting["energy_min"],
snakemake.params.plotting["energy_max"],
]
)
@ -287,7 +287,7 @@ def plot_balances():
df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[
df.abs().max(axis=1) < snakemake.config["plotting"]["energy_threshold"] / 10
df.abs().max(axis=1) < snakemake.params.plotting["energy_threshold"] / 10
]
if v[0] in co2_carriers:
@ -296,7 +296,7 @@ def plot_balances():
units = "TWh/a"
logger.debug(
f"Dropping technology energy balance smaller than {snakemake.config['plotting']['energy_threshold']/10} {units}"
f"Dropping technology energy balance smaller than {snakemake.params['plotting']['energy_threshold']/10} {units}"
)
logger.debug(df.loc[to_drop])
@ -317,7 +317,7 @@ def plot_balances():
kind="bar",
ax=ax,
stacked=True,
color=[snakemake.config["plotting"]["tech_colors"][i] for i in new_index],
color=[snakemake.params.plotting["tech_colors"][i] for i in new_index],
)
handles, labels = ax.get_legend_handles_labels()
@ -455,10 +455,10 @@ def plot_carbon_budget_distribution(input_eurostat):
ax1 = plt.subplot(gs1[0, 0])
ax1.set_ylabel("CO$_2$ emissions (Gt per year)", fontsize=22)
ax1.set_ylim([0, 5])
ax1.set_xlim([1990, snakemake.config["scenario"]["planning_horizons"][-1] + 1])
ax1.set_xlim([1990, snakemake.params.planning_horizons[-1] + 1])
path_cb = "results/" + snakemake.params.RDIR + "/csvs/"
countries = snakemake.config["countries"]
countries = snakemake.params.countries
e_1990 = co2_emissions_year(countries, input_eurostat, opts, year=1990)
CO2_CAP = pd.read_csv(path_cb + "carbon_budget_distribution.csv", index_col=0)
@ -555,7 +555,7 @@ if __name__ == "__main__":
plot_balances()
for sector_opts in snakemake.config["scenario"]["sector_opts"]:
for sector_opts in snakemake.params.sector_opts:
opts = sector_opts.split("-")
for o in opts:
if "cb" in o:

View File

@ -253,12 +253,12 @@ if __name__ == "__main__":
Nyears = n.snapshot_weightings.objective.sum() / 8760.0
costs = load_costs(
snakemake.input.tech_costs,
snakemake.config["costs"],
snakemake.config["electricity"],
snakemake.params.costs,
snakemake.params.max_hours,
Nyears,
)
set_line_s_max_pu(n, snakemake.config["lines"]["s_max_pu"])
set_line_s_max_pu(n, snakemake.params.lines["s_max_pu"])
for o in opts:
m = re.match(r"^\d+h$", o, re.IGNORECASE)
@ -277,11 +277,11 @@ if __name__ == "__main__":
if "Co2L" in o:
m = re.findall(r"[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
co2limit = float(m[0]) * snakemake.config["electricity"]["co2base"]
co2limit = float(m[0]) * snakemake.params.co2base
add_co2limit(n, co2limit, Nyears)
logger.info("Setting CO2 limit according to wildcard value.")
else:
add_co2limit(n, snakemake.config["electricity"]["co2limit"], Nyears)
add_co2limit(n, snakemake.params.co2limit, Nyears)
logger.info("Setting CO2 limit according to config value.")
break
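Note (illustrative): a worked example of the Co2L wildcard handling above, with a hypothetical base value:

import re

o = "Co2L0.05"
m = re.findall(r"[0-9]*\.?[0-9]+$", o)             # -> ["0.05"]
co2base = 1.487e9                                   # tCO2/a, hypothetical co2base value
co2limit = float(m[0]) * co2base if m else co2base  # -> 7.435e7 tCO2/a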
@ -293,11 +293,13 @@ if __name__ == "__main__":
add_gaslimit(n, limit, Nyears)
logger.info("Setting gas usage limit according to wildcard value.")
else:
add_gaslimit(n, snakemake.config["electricity"].get("gaslimit"), Nyears)
add_gaslimit(n, snakemake.params.gaslimit, Nyears)
logger.info("Setting gas usage limit according to config value.")
break
for o in opts:
if "+" not in o:
continue
oo = o.split("+")
suptechs = map(lambda c: c.split("-", 2)[0], n.carriers.index)
if oo[0].startswith(tuple(suptechs)):
@ -322,7 +324,7 @@ if __name__ == "__main__":
add_emission_prices(n, dict(co2=float(m[0])))
else:
logger.info("Setting emission prices according to config value.")
add_emission_prices(n, snakemake.config["costs"]["emission_prices"])
add_emission_prices(n, snakemake.params.costs["emission_prices"])
break
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
@ -330,8 +332,8 @@ if __name__ == "__main__":
set_line_nom_max(
n,
s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf),
p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf),
s_nom_max_set=snakemake.params.lines.get("s_nom_max", np.inf),
p_nom_max_set=snakemake.params.links.get("p_nom_max", np.inf),
)
if "ATK" in opts:

View File

@ -22,7 +22,7 @@ from _helpers import (
override_component_attrs,
update_config_with_sector_opts,
)
from add_electricity import calculate_annuity
from add_electricity import calculate_annuity, sanitize_carriers
from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
from networkx.algorithms import complement
from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
@ -200,12 +200,12 @@ def co2_emissions_year(
"""
Calculate CO2 emissions in one specific year (e.g. 1990 or 2018).
"""
emissions_scope = snakemake.config["energy"]["emissions"]
emissions_scope = snakemake.params.energy["emissions"]
eea_co2 = build_eea_co2(snakemake.input.co2, year, emissions_scope)
# TODO: read Eurostat data from year > 2014
# this only affects the estimation of CO2 emissions for BA, RS, AL, ME, MK
report_year = snakemake.config["energy"]["eurostat_report_year"]
report_year = snakemake.params.energy["eurostat_report_year"]
if year > 2014:
eurostat_co2 = build_eurostat_co2(
input_eurostat, countries, report_year, year=2014
@ -241,7 +241,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
carbon_budget = float(o[o.find("cb") + 2 : o.find("ex")])
r = float(o[o.find("ex") + 2 :])
countries = snakemake.config["countries"]
countries = snakemake.params.countries
e_1990 = co2_emissions_year(
countries, input_eurostat, opts, emissions_scope, report_year, year=1990
@ -252,7 +252,7 @@ def build_carbon_budget(o, input_eurostat, fn, emissions_scope, report_year):
countries, input_eurostat, opts, emissions_scope, report_year, year=2018
)
planning_horizons = snakemake.config["scenario"]["planning_horizons"]
planning_horizons = snakemake.params.planning_horizons
t_0 = planning_horizons[0]
if "be" in o:
@ -391,7 +391,7 @@ def update_wind_solar_costs(n, costs):
with xr.open_dataset(profile) as ds:
underwater_fraction = ds["underwater_fraction"].to_pandas()
connection_cost = (
snakemake.config["lines"]["length_factor"]
snakemake.params.length_factor
* ds["average_distance"].to_pandas()
* (
underwater_fraction
@ -483,8 +483,8 @@ def remove_elec_base_techs(n):
batteries and H2) from base electricity-only network, since they're added
here differently using links.
"""
for c in n.iterate_components(snakemake.config["pypsa_eur"]):
to_keep = snakemake.config["pypsa_eur"][c.name]
for c in n.iterate_components(snakemake.params.pypsa_eur):
to_keep = snakemake.params.pypsa_eur[c.name]
to_remove = pd.Index(c.df.carrier.unique()).symmetric_difference(to_keep)
if to_remove.empty:
continue
@ -674,7 +674,7 @@ def add_dac(n, costs):
def add_co2limit(n, nyears=1.0, limit=0.0):
logger.info(f"Adding CO2 budget limit as per unit of 1990 levels of {limit}")
countries = snakemake.config["countries"]
countries = snakemake.params.countries
sectors = emission_sectors_from_opts(opts)
@ -727,7 +727,7 @@ def cycling_shift(df, steps=1):
return df
def prepare_costs(cost_file, config, nyears):
def prepare_costs(cost_file, params, nyears):
# set all asset costs and other parameters
costs = pd.read_csv(cost_file, index_col=[0, 1]).sort_index()
@ -739,7 +739,7 @@ def prepare_costs(cost_file, config, nyears):
costs.loc[:, "value"].unstack(level=1).groupby("technology").sum(min_count=1)
)
costs = costs.fillna(config["fill_values"])
costs = costs.fillna(params["fill_values"])
def annuity_factor(v):
return calculate_annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100
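Note (illustrative): annuity_factor above relies on calculate_annuity from add_electricity; a simplified scalar sketch of the standard annuity formula it is based on:

def calculate_annuity(n, r):
    # fraction of the investment to be paid each year over a lifetime of n years
    # at discount rate r; degenerates to straight-line 1/n for r = 0
    return r / (1.0 - 1.0 / (1.0 + r) ** n) if r > 0 else 1 / n

calculate_annuity(25, 0.07)  # ~0.0858 per year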
@ -787,7 +787,7 @@ def add_ammonia(n, costs):
nodes = pop_layout.index
cf_industry = snakemake.config["industry"]
cf_industry = snakemake.params.industry
n.add("Carrier", "NH3")
@ -1102,7 +1102,7 @@ def add_storage_and_grids(n, costs):
lifetime=costs.at["OCGT", "lifetime"],
)
cavern_types = snakemake.config["sector"]["hydrogen_underground_storage_locations"]
cavern_types = snakemake.params.sector["hydrogen_underground_storage_locations"]
h2_caverns = pd.read_csv(snakemake.input.h2_cavern, index_col=0)
if (
@ -3274,7 +3274,7 @@ if __name__ == "__main__":
update_config_with_sector_opts(snakemake.config, snakemake.wildcards.sector_opts)
options = snakemake.config["sector"]
options = snakemake.params.sector
opts = snakemake.wildcards.sector_opts.split("-")
@ -3289,7 +3289,7 @@ if __name__ == "__main__":
costs = prepare_costs(
snakemake.input.costs,
snakemake.config["costs"],
snakemake.params.costs,
nyears,
)
@ -3301,10 +3301,10 @@ if __name__ == "__main__":
spatial = define_spatial(pop_layout.index, options)
if snakemake.config["foresight"] == "myopic":
if snakemake.params.foresight == "myopic":
add_lifetime_wind_solar(n, costs)
conventional = snakemake.config["existing_capacities"]["conventional_carriers"]
conventional = snakemake.params.conventional_carriers
for carrier in conventional:
add_carrier_buses(n, carrier)
@ -3373,15 +3373,15 @@ if __name__ == "__main__":
n = set_temporal_aggregation(n, opts, solver_name)
limit_type = "config"
limit = get(snakemake.config["co2_budget"], investment_year)
limit = get(snakemake.params.co2_budget, investment_year)
for o in opts:
if "cb" not in o:
continue
limit_type = "carbon budget"
fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv"
if not os.path.exists(fn):
emissions_scope = snakemake.config["energy"]["emissions"]
report_year = snakemake.config["energy"]["eurostat_report_year"]
emissions_scope = snakemake.params.emissions_scope
report_year = snakemake.params.eurostat_report_year
build_carbon_budget(
o, snakemake.input.eurostat, fn, emissions_scope, report_year
)
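Note (illustrative): a worked example of how build_carbon_budget parses the sector_opts token, with a hypothetical wildcard:

o = "cb25.7ex0"
carbon_budget = float(o[o.find("cb") + 2 : o.find("ex")])  # -> 25.7 (GtCO2 budget)
r = float(o[o.find("ex") + 2 :])                           # -> 0.0 (exponential decay rate)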
@ -3416,8 +3416,8 @@ if __name__ == "__main__":
if options["electricity_grid_connection"]:
add_electricity_grid_connection(n, costs)
first_year_myopic = (snakemake.config["foresight"] == "myopic") and (
snakemake.config["scenario"]["planning_horizons"][0] == investment_year
first_year_myopic = (snakemake.params.foresight == "myopic") and (
snakemake.params.planning_horizons[0] == investment_year
)
if options.get("cluster_heat_buses", False) and not first_year_myopic:
@ -3425,4 +3425,6 @@ if __name__ == "__main__":
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
sanitize_carriers(n, snakemake.config)
n.export_to_netcdf(snakemake.output[0])

View File

@ -95,13 +95,13 @@ import scipy as sp
from _helpers import configure_logging, get_aggregation_strategies, update_p_nom_max
from add_electricity import load_costs
from cluster_network import cluster_regions, clustering_for_n_clusters
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
from pypsa.networkclustering import (
from pypsa.clustering.spatial import (
aggregategenerators,
aggregateoneport,
busmap_by_stubs,
get_clustering_from_busmap,
)
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
from scipy.sparse.csgraph import connected_components, dijkstra
logger = logging.getLogger(__name__)
@ -149,17 +149,17 @@ def simplify_network_to_380(n):
return n, trafo_map
def _prepare_connection_costs_per_link(n, costs, config):
def _prepare_connection_costs_per_link(n, costs, renewable_carriers, length_factor):
if n.links.empty:
return {}
connection_costs_per_link = {}
for tech in config["renewable"]:
for tech in renewable_carriers:
if tech.startswith("offwind"):
connection_costs_per_link[tech] = (
n.links.length
* config["lines"]["length_factor"]
* length_factor
* (
n.links.underwater_fraction
* costs.at[tech + "-connection-submarine", "capital_cost"]
@ -172,10 +172,18 @@ def _prepare_connection_costs_per_link(n, costs, config):
def _compute_connection_costs_to_bus(
n, busmap, costs, config, connection_costs_per_link=None, buses=None
n,
busmap,
costs,
renewable_carriers,
length_factor,
connection_costs_per_link=None,
buses=None,
):
if connection_costs_per_link is None:
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
connection_costs_per_link = _prepare_connection_costs_per_link(
n, costs, renewable_carriers, length_factor
)
if buses is None:
buses = busmap.index[busmap.index != busmap.values]
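Note (illustrative): _prepare_connection_costs_per_link above weights the link length by the length factor and splits it into a submarine share and (by assumption here) an underground share. A worked sketch with hypothetical cost figures:

length, length_factor = 40.0, 1.25                 # km, dimensionless
underwater_fraction = 0.6
submarine_cost, underground_cost = 2685.0, 1342.0  # EUR/MW/km, hypothetical
connection_cost = length * length_factor * (
    underwater_fraction * submarine_cost
    + (1 - underwater_fraction) * underground_cost
)  # -> 107,390 EUR/MW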
@ -265,7 +273,16 @@ def _aggregate_and_move_components(
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
def simplify_links(
n,
costs,
renewables,
length_factor,
p_max_pu,
exclude_carriers,
output,
aggregation_strategies=dict(),
):
## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components")
@ -315,7 +332,9 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
busmap = n.buses.index.to_series()
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
connection_costs_per_link = _prepare_connection_costs_per_link(
n, costs, renewables, length_factor
)
connection_costs_to_bus = pd.DataFrame(
0.0, index=n.buses.index, columns=list(connection_costs_per_link)
)
@ -333,12 +352,17 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
)
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(
n, busmap, costs, config, connection_costs_per_link, buses
n,
busmap,
costs,
renewables,
length_factor,
connection_costs_per_link,
buses,
)
all_links = [i for _, i in sum(links, [])]
p_max_pu = config["links"].get("p_max_pu", 1.0)
lengths = n.links.loc[all_links, "length"]
name = lengths.idxmax() + "+{}".format(len(links) - 1)
params = dict(
@ -377,10 +401,6 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
logger.debug("Collecting all components using the busmap")
exclude_carriers = config["clustering"]["simplify_network"].get(
"exclude_carriers", []
)
_aggregate_and_move_components(
n,
busmap,
@ -392,19 +412,23 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
return n, busmap
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
def remove_stubs(
n,
costs,
renewable_carriers,
length_factor,
simplify_network,
output,
aggregation_strategies=dict(),
):
logger.info("Removing stubs")
across_borders = config["clustering"]["simplify_network"].get(
"remove_stubs_across_borders", True
)
across_borders = simplify_network["remove_stubs_across_borders"]
matching_attrs = [] if across_borders else ["country"]
busmap = busmap_by_stubs(n, matching_attrs)
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
exclude_carriers = config["clustering"]["simplify_network"].get(
"exclude_carriers", []
connection_costs_to_bus = _compute_connection_costs_to_bus(
n, busmap, costs, renewable_carriers, length_factor
)
_aggregate_and_move_components(
@ -413,7 +437,7 @@ def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
connection_costs_to_bus,
output,
aggregation_strategies=aggregation_strategies,
exclude_carriers=exclude_carriers,
exclude_carriers=simplify_network["exclude_carriers"],
)
return n, busmap
@ -473,26 +497,22 @@ def aggregate_to_substations(n, aggregation_strategies=dict(), buses_i=None):
def cluster(
n, n_clusters, config, algorithm="hac", feature=None, aggregation_strategies=dict()
n,
n_clusters,
focus_weights,
solver_name,
algorithm="hac",
feature=None,
aggregation_strategies=dict(),
):
logger.info(f"Clustering to {n_clusters} buses")
focus_weights = config.get("focus_weights", None)
renewable_carriers = pd.Index(
[
tech
for tech in n.generators.carrier.unique()
if tech.split("-", 2)[0] in config["renewable"]
]
)
clustering = clustering_for_n_clusters(
n,
n_clusters,
custom_busmap=False,
aggregation_strategies=aggregation_strategies,
solver_name=config["solving"]["solver"]["name"],
solver_name=solver_name,
algorithm=algorithm,
feature=feature,
focus_weights=focus_weights,
@ -508,67 +528,69 @@ if __name__ == "__main__":
snakemake = mock_snakemake("simplify_network", simpl="")
configure_logging(snakemake)
n = pypsa.Network(snakemake.input.network)
params = snakemake.params
solver_name = snakemake.config["solving"]["solver"]["name"]
n = pypsa.Network(snakemake.input.network)
Nyears = n.snapshot_weightings.objective.sum() / 8760
aggregation_strategies = snakemake.config["clustering"].get(
"aggregation_strategies", {}
)
# translate str entries of aggregation_strategies to pd.Series functions:
aggregation_strategies = {
p: {k: getattr(pd.Series, v) for k, v in aggregation_strategies[p].items()}
for p in aggregation_strategies.keys()
p: {
k: getattr(pd.Series, v)
for k, v in params.aggregation_strategies[p].items()
}
for p in params.aggregation_strategies.keys()
}
n, trafo_map = simplify_network_to_380(n)
Nyears = n.snapshot_weightings.objective.sum() / 8760
technology_costs = load_costs(
snakemake.input.tech_costs,
snakemake.config["costs"],
snakemake.config["electricity"],
params.costs,
params.max_hours,
Nyears,
)
n, simplify_links_map = simplify_links(
n, technology_costs, snakemake.config, snakemake.output, aggregation_strategies
n,
technology_costs,
params.renewable_carriers,
params.length_factor,
params.p_max_pu,
params.simplify_network["exclude_carriers"],
snakemake.output,
aggregation_strategies,
)
busmaps = [trafo_map, simplify_links_map]
cluster_config = snakemake.config["clustering"]["simplify_network"]
if cluster_config.get("remove_stubs", True):
if params.simplify_network["remove_stubs"]:
n, stub_map = remove_stubs(
n,
technology_costs,
snakemake.config,
params.renewable_carriers,
params.length_factor,
params.simplify_network,
snakemake.output,
aggregation_strategies=aggregation_strategies,
)
busmaps.append(stub_map)
if cluster_config.get("to_substations", False):
if params.simplify_network["to_substations"]:
n, substation_map = aggregate_to_substations(n, aggregation_strategies)
busmaps.append(substation_map)
# treatment of outliers (nodes without a profile for considered carrier):
# all nodes that have no profile of the given carrier are being aggregated to closest neighbor
if (
snakemake.config.get("clustering", {})
.get("cluster_network", {})
.get("algorithm", "hac")
== "hac"
or cluster_config.get("algorithm", "hac") == "hac"
):
carriers = (
cluster_config.get("feature", "solar+onwind-time").split("-")[0].split("+")
)
if params.simplify_network["algorithm"] == "hac":
carriers = params.simplify_network["feature"].split("-")[0].split("+")
for carrier in carriers:
buses_i = list(
set(n.buses.index) - set(n.generators.query("carrier == @carrier").bus)
)
logger.info(
f"clustering preparaton (hac): aggregating {len(buses_i)} buses of type {carrier}."
f"clustering preparation (hac): aggregating {len(buses_i)} buses of type {carrier}."
)
n, busmap_hac = aggregate_to_substations(n, aggregation_strategies, buses_i)
busmaps.append(busmap_hac)
@ -577,9 +599,10 @@ if __name__ == "__main__":
n, cluster_map = cluster(
n,
int(snakemake.wildcards.simpl),
snakemake.config,
cluster_config.get("algorithm", "hac"),
cluster_config.get("feature", None),
params.focus_weights,
solver_name,
params.simplify_network["algorithm"],
params.simplify_network["feature"],
aggregation_strategies,
)
busmaps.append(cluster_map)

View File

@ -44,14 +44,14 @@ pypsa.pf.logger.setLevel(logging.WARNING)
from pypsa.descriptors import get_switchable_as_dense as get_as_dense
def add_land_use_constraint(n, config):
def add_land_use_constraint(n, planning_horizons, config):
if "m" in snakemake.wildcards.clusters:
_add_land_use_constraint_m(n, config)
_add_land_use_constraint_m(n, planning_horizons, config)
else:
_add_land_use_constraint(n, config)
_add_land_use_constraint(n)
def _add_land_use_constraint(n, config):
def _add_land_use_constraint(n):
# warning: this will miss existing offwind which is not classed AC-DC and has carrier 'offwind'
for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
@ -80,10 +80,10 @@ def _add_land_use_constraint(n, config):
n.generators.p_nom_max.clip(lower=0, inplace=True)
def _add_land_use_constraint_m(n, config):
def _add_land_use_constraint_m(n, planning_horizons, config):
# if generators clustering is lower than network clustering, land_use accounting is at generators clusters
planning_horizons = config["scenario"]["planning_horizons"]
# planning_horizons is now passed in directly as a function argument
grouping_years = config["existing_capacities"]["grouping_years"]
current_horizon = snakemake.wildcards.planning_horizons
@ -141,7 +141,14 @@ def add_co2_sequestration_limit(n, limit=200):
)
def prepare_network(n, solve_opts=None, config=None):
def prepare_network(
n,
solve_opts=None,
config=None,
foresight=None,
planning_horizons=None,
co2_sequestration_potential=None,
):
if "clip_p_max_pu" in solve_opts:
for df in (
n.generators_t.p_max_pu,
@ -191,11 +198,11 @@ def prepare_network(n, solve_opts=None, config=None):
n.set_snapshots(n.snapshots[:nhours])
n.snapshot_weightings[:] = 8760.0 / nhours
if config["foresight"] == "myopic":
add_land_use_constraint(n, config)
if foresight == "myopic":
add_land_use_constraint(n, planning_horizons, config)
if n.stores.carrier.eq("co2 stored").any():
limit = config["sector"].get("co2_sequestration_potential", 200)
limit = co2_sequestration_potential
add_co2_sequestration_limit(n, limit=limit)
return n
@ -228,8 +235,7 @@ def add_CCL_constraints(n, config):
p_nom = n.model["Generator-p_nom"]
gens = n.generators.query("p_nom_extendable").rename_axis(index="Generator-ext")
grouper = [gens.bus.map(n.buses.country), gens.carrier]
grouper = xr.DataArray(pd.MultiIndex.from_arrays(grouper), dims=["Generator-ext"])
grouper = pd.concat([gens.bus.map(n.buses.country), gens.carrier], axis=1)
lhs = p_nom.groupby(grouper).sum().rename(bus="country")
minimum = xr.DataArray(agg_p_nom_minmax["min"].dropna()).rename(dim_0="group")
@ -590,13 +596,11 @@ def extra_functionality(n, snapshots):
add_pipe_retrofit_constraint(n)
def solve_network(n, config, opts="", **kwargs):
set_of_options = config["solving"]["solver"]["options"]
solver_options = (
config["solving"]["solver_options"][set_of_options] if set_of_options else {}
)
solver_name = config["solving"]["solver"]["name"]
cf_solving = config["solving"]["options"]
def solve_network(n, config, solving, opts="", **kwargs):
set_of_options = solving["solver"]["options"]
solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
solver_name = solving["solver"]["name"]
cf_solving = solving["options"]
track_iterations = cf_solving.get("track_iterations", False)
min_iterations = cf_solving.get("min_iterations", 4)
max_iterations = cf_solving.get("max_iterations", 6)
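Note (illustrative): the named solver-option set is resolved from the solving section as sketched below, with hypothetical settings:

solving = {
    "solver": {"name": "gurobi", "options": "gurobi-default"},
    "solver_options": {"gurobi-default": {"method": 2, "crossover": 0, "threads": 4}},
    "options": {"track_iterations": False, "min_iterations": 4, "max_iterations": 6},
}
set_of_options = solving["solver"]["options"]
solver_options = solving["solver_options"][set_of_options] if set_of_options else {}
solver_name = solving["solver"]["name"]
cf_solving = solving["options"]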
@ -665,7 +669,7 @@ if __name__ == "__main__":
if "sector_opts" in snakemake.wildcards.keys():
opts += "-" + snakemake.wildcards.sector_opts
opts = [o for o in opts.split("-") if o != ""]
solve_opts = snakemake.config["solving"]["options"]
solve_opts = snakemake.params.solving["options"]
np.random.seed(solve_opts.get("seed", 123))
@ -675,10 +679,21 @@ if __name__ == "__main__":
else:
n = pypsa.Network(snakemake.input.network)
n = prepare_network(n, solve_opts, config=snakemake.config)
n = prepare_network(
n,
solve_opts,
config=snakemake.config,
foresight=snakemake.params.foresight,
planning_horizons=snakemake.params.planning_horizons,
co2_sequestration_potential=snakemake.params["co2_sequestration_potential"],
)
n = solve_network(
n, config=snakemake.config, opts=opts, log_fn=snakemake.log.solver
n,
config=snakemake.config,
solving=snakemake.params.solving,
opts=opts,
log_fn=snakemake.log.solver,
)
n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))

View File

@ -41,7 +41,7 @@ if __name__ == "__main__":
opts = (snakemake.wildcards.opts + "-" + snakemake.wildcards.sector_opts).split("-")
opts = [o for o in opts if o != ""]
solve_opts = snakemake.config["solving"]["options"]
solve_opts = snakemake.params.options
np.random.seed(solve_opts.get("seed", 123))