From f876d78ed4cfbc2d14d424066b25a598abc8deb3 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 19 Jan 2024 10:34:49 +0100 Subject: [PATCH 1/5] fix linting: run ruff --- scripts/_helpers.py | 2 -- scripts/add_brownfield.py | 4 ++-- scripts/build_electricity_demand.py | 1 - scripts/build_line_rating.py | 1 - scripts/build_renewable_profiles.py | 2 +- scripts/copy_config.py | 2 -- scripts/make_summary_perfect.py | 10 ++-------- scripts/plot_network.py | 1 - scripts/plot_summary.py | 1 - scripts/plot_validation_electricity_prices.py | 1 - scripts/prepare_network.py | 1 - scripts/simplify_network.py | 2 +- scripts/solve_network.py | 2 +- 13 files changed, 7 insertions(+), 23 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index c5c96db9..67b20877 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -15,8 +15,6 @@ import pandas as pd import pytz import requests import yaml -from pypsa.components import component_attrs, components -from pypsa.descriptors import Dict from tqdm import tqdm logger = logging.getLogger(__name__) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index cb1f51c8..229b8b07 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -121,7 +121,7 @@ def add_brownfield(n, n_p, year): def disable_grid_expansion_if_LV_limit_hit(n): - if not "lv_limit" in n.global_constraints.index: + if "lv_limit" not in n.global_constraints.index: return total_expansion = ( @@ -133,7 +133,7 @@ def disable_grid_expansion_if_LV_limit_hit(n): # allow small numerical differences if lv_limit - total_expansion < 1: - logger.info(f"LV is already reached, disabling expansion and LV limit") + logger.info("LV is already reached, disabling expansion and LV limit") extendable_acs = n.lines.query("s_nom_extendable").index n.lines.loc[extendable_acs, "s_nom_extendable"] = False n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"] diff --git a/scripts/build_electricity_demand.py 
b/scripts/build_electricity_demand.py index d7d9927d..4706bf58 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -42,7 +42,6 @@ Outputs import logging logger = logging.getLogger(__name__) -import dateutil import numpy as np import pandas as pd from _helpers import configure_logging diff --git a/scripts/build_line_rating.py b/scripts/build_line_rating.py index 589f3656..4d45b910 100755 --- a/scripts/build_line_rating.py +++ b/scripts/build_line_rating.py @@ -50,7 +50,6 @@ With a heat balance considering the maximum temperature threshold of the transmi the maximal possible capacity factor "s_max_pu" for each transmission line at each time step is calculated. """ -import logging import re import atlite diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index f9db9271..6c450aca 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -340,7 +340,7 @@ if __name__ == "__main__": f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)" ) - logger.info(f"Calculating maximal capacity per bus") + logger.info("Calculating maximal capacity per bus") p_nom_max = capacity_per_sqkm * availability @ area logger.info("Calculate average distances.") diff --git a/scripts/copy_config.py b/scripts/copy_config.py index a549d893..3dbee6a4 100644 --- a/scripts/copy_config.py +++ b/scripts/copy_config.py @@ -6,8 +6,6 @@ Copy used configuration files and important scripts for archiving. """ -from pathlib import Path -from shutil import copy import yaml diff --git a/scripts/make_summary_perfect.py b/scripts/make_summary_perfect.py index c387c6cf..93411a4c 100644 --- a/scripts/make_summary_perfect.py +++ b/scripts/make_summary_perfect.py @@ -12,15 +12,9 @@ other metrics. 
import numpy as np import pandas as pd import pypsa -from make_summary import ( - assign_carriers, - assign_locations, - calculate_cfs, - calculate_nodal_cfs, - calculate_nodal_costs, -) +from make_summary import assign_carriers, assign_locations from prepare_sector_network import prepare_costs -from pypsa.descriptors import get_active_assets, nominal_attrs +from pypsa.descriptors import get_active_assets from six import iteritems idx = pd.IndexSlice diff --git a/scripts/plot_network.py b/scripts/plot_network.py index 63b5d848..b34ba432 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -903,7 +903,6 @@ def plot_series(network, carrier="AC", name="test"): carrier, start, stop, - name, ), transparent=True, ) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 2a6c9f15..d29207af 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -12,7 +12,6 @@ logger = logging.getLogger(__name__) import matplotlib.gridspec as gridspec import matplotlib.pyplot as plt -import numpy as np import pandas as pd plt.style.use("ggplot") diff --git a/scripts/plot_validation_electricity_prices.py b/scripts/plot_validation_electricity_prices.py index 2a187b9f..38c9fe96 100644 --- a/scripts/plot_validation_electricity_prices.py +++ b/scripts/plot_validation_electricity_prices.py @@ -9,7 +9,6 @@ import pandas as pd import pypsa import seaborn as sns from _helpers import configure_logging -from pypsa.statistics import get_bus_and_carrier sns.set_theme("paper", style="whitegrid") diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 632e6078..75e5daba 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -58,7 +58,6 @@ Description """ import logging -import re import numpy as np import pandas as pd diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index f88d10d4..f8629ea7 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -86,7 +86,7 @@ The rule 
:mod:`simplify_network` does up to four things: """ import logging -from functools import partial, reduce +from functools import reduce import numpy as np import pandas as pd diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 55704d4d..c6c1b10c 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -690,7 +690,7 @@ def add_battery_constraints(n): def add_lossy_bidirectional_link_constraints(n): - if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: + if not n.links.p_nom_extendable.any() or "reversed" not in n.links.columns: return n.links["reversed"] = n.links.reversed.fillna(0).astype(bool) From b1d21813af24c531025d55eee19e67b30ebe1200 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 19 Jan 2024 10:47:58 +0100 Subject: [PATCH 2/5] fix import order --- scripts/_benchmark.py | 6 +++--- scripts/add_brownfield.py | 10 ++++------ scripts/add_existing_baseyear.py | 11 +++-------- scripts/build_biomass_potentials.py | 2 +- scripts/build_electricity_demand.py | 3 ++- scripts/build_energy_totals.py | 5 +---- scripts/build_gas_input_locations.py | 4 ++-- scripts/build_gas_network.py | 4 ++-- scripts/build_industrial_distribution_key.py | 4 +--- scripts/build_industrial_production_per_country.py | 6 ++---- scripts/build_population_layouts.py | 5 ++--- scripts/cluster_gas_network.py | 4 ++-- scripts/make_summary.py | 5 +---- scripts/plot_network.py | 3 +-- scripts/plot_summary.py | 3 +-- scripts/prepare_sector_network.py | 10 +++------- scripts/retrieve_sector_databundle.py | 5 ++--- scripts/solve_network.py | 2 +- 18 files changed, 34 insertions(+), 58 deletions(-) diff --git a/scripts/_benchmark.py b/scripts/_benchmark.py index 4e3413e9..ced102ba 100644 --- a/scripts/_benchmark.py +++ b/scripts/_benchmark.py @@ -13,15 +13,15 @@ import os import sys import time +from memory_profiler import _get_memory, choose_backend + logger = logging.getLogger(__name__) # TODO: provide alternative when multiprocessing is not 
available try: from multiprocessing import Pipe, Process except ImportError: - from multiprocessing.dummy import Process, Pipe - -from memory_profiler import _get_memory, choose_backend + from multiprocessing.dummy import Pipe, Process # The memory logging facilities have been adapted from memory_profiler diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 229b8b07..ac58136a 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -8,17 +8,15 @@ Prepares brownfield data from previous planning horizon. import logging -logger = logging.getLogger(__name__) - -import pandas as pd - -idx = pd.IndexSlice - import numpy as np +import pandas as pd import pypsa from _helpers import update_config_with_sector_opts from add_existing_baseyear import add_build_year_to_new_assets +logger = logging.getLogger(__name__) +idx = pd.IndexSlice + def add_brownfield(n, n_p, year): logger.info(f"Preparing brownfield for the year {year}") diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index e7894324..c8486758 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -8,25 +8,20 @@ horizon. """ import logging - -logger = logging.getLogger(__name__) - -import pandas as pd - -idx = pd.IndexSlice - from types import SimpleNamespace import country_converter as coco import numpy as np +import pandas as pd import pypsa import xarray as xr from _helpers import update_config_with_sector_opts from add_electricity import sanitize_carriers from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs +logger = logging.getLogger(__name__) cc = coco.CountryConverter() - +idx = pd.IndexSlice spatial = SimpleNamespace() diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py index aae1fb98..b6cbbfbf 100644 --- a/scripts/build_biomass_potentials.py +++ b/scripts/build_biomass_potentials.py @@ -9,11 +9,11 @@ using data from JRC ENSPRESO. 
import logging -logger = logging.getLogger(__name__) import geopandas as gpd import numpy as np import pandas as pd +logger = logging.getLogger(__name__) AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050] diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index 4706bf58..a08055ba 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -41,12 +41,13 @@ Outputs import logging -logger = logging.getLogger(__name__) import numpy as np import pandas as pd from _helpers import configure_logging from pandas import Timedelta as Delta +logger = logging.getLogger(__name__) + def load_timeseries(fn, years, countries, powerstatistics=True): """ diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py index 80c5d442..39b2a1be 100644 --- a/scripts/build_energy_totals.py +++ b/scripts/build_energy_totals.py @@ -7,9 +7,6 @@ Build total energy demands per country using JRC IDEES, eurostat, and EEA data. """ import logging - -logger = logging.getLogger(__name__) - import multiprocessing as mp from functools import partial @@ -21,7 +18,7 @@ from _helpers import mute_print from tqdm import tqdm cc = coco.CountryConverter() - +logger = logging.getLogger(__name__) idx = pd.IndexSlice diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 9ad3760d..0c9e4c4b 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -9,12 +9,12 @@ production sites with data from SciGRID_gas and Global Energy Monitor. 
import logging -logger = logging.getLogger(__name__) - import geopandas as gpd import pandas as pd from cluster_gas_network import load_bus_regions +logger = logging.getLogger(__name__) + def read_scigrid_gas(fn): df = gpd.read_file(fn) diff --git a/scripts/build_gas_network.py b/scripts/build_gas_network.py index 92e686cd..8df7744d 100644 --- a/scripts/build_gas_network.py +++ b/scripts/build_gas_network.py @@ -9,13 +9,13 @@ Preprocess gas network based on data from bthe SciGRID_gas project import logging -logger = logging.getLogger(__name__) - import geopandas as gpd import pandas as pd from pypsa.geo import haversine_pts from shapely.geometry import Point +logger = logging.getLogger(__name__) + def diameter_to_capacity(pipe_diameter_mm): """ diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index e6d515b0..08de3bae 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -7,9 +7,6 @@ Build spatial distribution of industries from Hotmaps database. """ import logging - -logger = logging.getLogger(__name__) - import uuid from itertools import product @@ -18,6 +15,7 @@ import geopandas as gpd import pandas as pd from packaging.version import Version, parse +logger = logging.getLogger(__name__) cc = coco.CountryConverter() diff --git a/scripts/build_industrial_production_per_country.py b/scripts/build_industrial_production_per_country.py index 74cb1949..0aea4f15 100644 --- a/scripts/build_industrial_production_per_country.py +++ b/scripts/build_industrial_production_per_country.py @@ -7,11 +7,8 @@ Build industrial production per country. 
""" import logging -from functools import partial - -logger = logging.getLogger(__name__) - import multiprocessing as mp +from functools import partial import country_converter as coco import numpy as np @@ -19,6 +16,7 @@ import pandas as pd from _helpers import mute_print from tqdm import tqdm +logger = logging.getLogger(__name__) cc = coco.CountryConverter() tj_to_ktoe = 0.0238845 diff --git a/scripts/build_population_layouts.py b/scripts/build_population_layouts.py index e864d925..3e2b77d4 100644 --- a/scripts/build_population_layouts.py +++ b/scripts/build_population_layouts.py @@ -8,15 +8,14 @@ Build mapping between cutout grid cells and population (total, urban, rural). import logging -logger = logging.getLogger(__name__) - - import atlite import geopandas as gpd import numpy as np import pandas as pd import xarray as xr +logger = logging.getLogger(__name__) + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake diff --git a/scripts/cluster_gas_network.py b/scripts/cluster_gas_network.py index e7554dff..b8da5012 100755 --- a/scripts/cluster_gas_network.py +++ b/scripts/cluster_gas_network.py @@ -8,14 +8,14 @@ Cluster gas transmission network to clustered model regions. import logging -logger = logging.getLogger(__name__) - import geopandas as gpd import pandas as pd from packaging.version import Version, parse from pypsa.geo import haversine_pts from shapely import wkt +logger = logging.getLogger(__name__) + def concat_gdf(gdf_list, crs="EPSG:4326"): """ diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 7223f4d5..7f08b678 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -8,9 +8,6 @@ capacity factors, curtailment, energy balances, prices and other metrics. 
""" import logging - -logger = logging.getLogger(__name__) - import sys import numpy as np @@ -19,7 +16,7 @@ import pypsa from prepare_sector_network import prepare_costs idx = pd.IndexSlice - +logger = logging.getLogger(__name__) opt_name = {"Store": "e", "Line": "s", "Transformer": "s"} diff --git a/scripts/plot_network.py b/scripts/plot_network.py index b34ba432..d8c17587 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -13,8 +13,6 @@ nodes. import logging -logger = logging.getLogger(__name__) - import cartopy.crs as ccrs import geopandas as gpd import matplotlib.pyplot as plt @@ -24,6 +22,7 @@ from make_summary import assign_carriers from plot_summary import preferred_order, rename_techs from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches +logger = logging.getLogger(__name__) plt.style.use(["ggplot"]) diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index d29207af..7e2c955d 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -8,12 +8,11 @@ Creates plots from summary CSV files. 
import logging -logger = logging.getLogger(__name__) - import matplotlib.gridspec as gridspec import matplotlib.pyplot as plt import pandas as pd +logger = logging.getLogger(__name__) plt.style.use("ggplot") from prepare_sector_network import co2_emissions_year diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 3be2bdac..21291d7e 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -11,6 +11,7 @@ import logging import os import re from itertools import product +from types import SimpleNamespace import networkx as nx import numpy as np @@ -22,18 +23,13 @@ from add_electricity import calculate_annuity, sanitize_carriers from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2 from networkx.algorithms import complement from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation +from packaging.version import Version, parse from pypsa.geo import haversine_pts from pypsa.io import import_components_from_dataframe from scipy.stats import beta -logger = logging.getLogger(__name__) - -from types import SimpleNamespace - spatial = SimpleNamespace() - -from packaging.version import Version, parse - +logger = logging.getLogger(__name__) pd_version = parse(pd.__version__) agg_group_kwargs = dict(numeric_only=False) if pd_version >= Version("1.3") else {} diff --git a/scripts/retrieve_sector_databundle.py b/scripts/retrieve_sector_databundle.py index cb6cc969..5baf2c56 100644 --- a/scripts/retrieve_sector_databundle.py +++ b/scripts/retrieve_sector_databundle.py @@ -7,14 +7,13 @@ Retrieve and extract data bundle for sector-coupled studies. 
""" import logging - -logger = logging.getLogger(__name__) - import tarfile from pathlib import Path from _helpers import configure_logging, progress_retrieve, validate_checksum +logger = logging.getLogger(__name__) + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake diff --git a/scripts/solve_network.py b/scripts/solve_network.py index c6c1b10c..4a1e27f9 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -39,10 +39,10 @@ import xarray as xr from _benchmark import memory_logger from _helpers import configure_logging, get_opt, update_config_with_sector_opts from pypsa.descriptors import get_activity_mask +from pypsa.descriptors import get_switchable_as_dense as get_as_dense logger = logging.getLogger(__name__) pypsa.pf.logger.setLevel(logging.WARNING) -from pypsa.descriptors import get_switchable_as_dense as get_as_dense def add_land_use_constraint(n, planning_horizons, config): From 025688bf70b50c182cd17fa358cc39c07604cdee Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 19 Jan 2024 12:16:07 +0100 Subject: [PATCH 3/5] refactor and fix remaining linting problems --- scripts/_helpers.py | 24 +++++++++++++++---- scripts/add_electricity.py | 10 ++++---- scripts/build_biomass_transport_costs.py | 5 ++++ scripts/build_clustered_population_layouts.py | 2 +- scripts/build_gas_input_locations.py | 15 +++++++----- scripts/build_heat_demand.py | 2 +- scripts/build_industrial_distribution_key.py | 4 ++-- scripts/build_line_rating.py | 2 +- scripts/build_population_layouts.py | 2 +- scripts/build_retro_cost.py | 12 +++++----- scripts/build_temperature_profiles.py | 2 +- scripts/cluster_network.py | 5 +--- scripts/make_summary.py | 4 ---- scripts/plot_network.py | 3 +-- scripts/plot_summary.py | 3 +-- scripts/prepare_network.py | 2 +- scripts/prepare_perfect_foresight.py | 4 ++-- scripts/prepare_sector_network.py | 6 ++--- scripts/retrieve_monthly_fuel_prices.py | 5 ++-- scripts/solve_network.py | 4 ++-- 20 files 
changed, 64 insertions(+), 52 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 67b20877..d84f025b 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -360,8 +360,24 @@ def generate_periodic_profiles(dt_index, nodes, weekly_profile, localize=None): return week_df -def parse(l): - return yaml.safe_load(l[0]) if len(l) == 1 else {l.pop(0): parse(l)} +def parse(infix): + """ + Recursively parse a list into a dictionary or a YAML object. + + Parameters + ---------- + list_to_parse : list + The list to parse. + + Returns + ------- + dict or YAML object + The parsed list. + """ + if len(infix) == 1: + return yaml.safe_load(infix[0]) + else: + return {infix[0]: parse(infix[1:])} def update_config_with_sector_opts(config, sector_opts): @@ -369,8 +385,8 @@ def update_config_with_sector_opts(config, sector_opts): for o in sector_opts.split("-"): if o.startswith("CF+"): - l = o.split("+")[1:] - update_config(config, parse(l)) + infix = o.split("+")[1:] + update_config(config, parse(infix)) def get_checksum_from_zenodo(file_url): diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py index e626f456..c9e5abca 100755 --- a/scripts/add_electricity.py +++ b/scripts/add_electricity.py @@ -294,10 +294,10 @@ def attach_load(n, regions, load, nuts3_shapes, ua_md_gdp, countries, scaling=1. nuts3 = gpd.read_file(nuts3_shapes).set_index("index") def upsample(cntry, group): - l = opsd_load[cntry] + load = opsd_load[cntry] if len(group) == 1: - return pd.DataFrame({group.index[0]: l}) + return pd.DataFrame({group.index[0]: load}) nuts3_cntry = nuts3.loc[nuts3.country == cntry] transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr() gdp_n = pd.Series( @@ -314,8 +314,8 @@ def attach_load(n, regions, load, nuts3_shapes, ua_md_gdp, countries, scaling=1. 
# overwrite factor because nuts3 provides no data for UA+MD factors = normed(ua_md_gdp.loc[group.index, "GDP_PPP"].squeeze()) return pd.DataFrame( - factors.values * l.values[:, np.newaxis], - index=l.index, + factors.values * load.values[:, np.newaxis], + index=load.index, columns=factors.index, ) @@ -622,7 +622,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par hydro.max_hours > 0, hydro.country.map(max_hours_country) ).fillna(6) - if flatten_dispatch := params.get("flatten_dispatch", False): + if params.get("flatten_dispatch", False): buffer = params.get("flatten_dispatch_buffer", 0.2) average_capacity_factor = inflow_t[hydro.index].mean() / hydro["p_nom"] p_max_pu = (average_capacity_factor + buffer).clip(upper=1) diff --git a/scripts/build_biomass_transport_costs.py b/scripts/build_biomass_transport_costs.py index 9271b600..05b64519 100644 --- a/scripts/build_biomass_transport_costs.py +++ b/scripts/build_biomass_transport_costs.py @@ -80,4 +80,9 @@ def build_biomass_transport_costs(): if __name__ == "__main__": + if "snakemake" not in globals(): + from _helpers import mock_snakemake + + snakemake = mock_snakemake("build_biomass_transport_costs") + build_biomass_transport_costs() diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 2f237656..f1d386bd 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -28,7 +28,7 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) - I = cutout.indicatormatrix(clustered_regions) + I = cutout.indicatormatrix(clustered_regions) # noqa: E741 pop = {} for item in ["total", "urban", "rural"]: diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 0c9e4c4b..d8727063 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -27,8 +27,11 @@ def 
build_gem_lng_data(fn): df = pd.read_excel(fn[0], sheet_name="LNG terminals - data") df = df.set_index("ComboID") - remove_country = ["Cyprus", "Turkey"] - remove_terminal = ["Puerto de la Luz LNG Terminal", "Gran Canaria LNG Terminal"] + remove_country = ["Cyprus", "Turkey"] # noqa: F841 + remove_terminal = [ + "Puerto de la Luz LNG Terminal", + "Gran Canaria LNG Terminal", + ] # noqa: F841 df = df.query( "Status != 'Cancelled' \ @@ -45,8 +48,8 @@ def build_gem_prod_data(fn): df = pd.read_excel(fn[0], sheet_name="Gas extraction - main") df = df.set_index("GEM Unit ID") - remove_country = ["Cyprus", "Türkiye"] - remove_fuel_type = ["oil"] + remove_country = ["Cyprus", "Türkiye"] # noqa: F841 + remove_fuel_type = ["oil"] # noqa: F841 df = df.query( "Status != 'shut in' \ @@ -96,8 +99,8 @@ def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries): ] sto = read_scigrid_gas(sto_fn) - remove_country = ["RU", "UA", "TR", "BY"] - sto = sto.query("country_code != @remove_country") + remove_country = ["RU", "UA", "TR", "BY"] # noqa: F841 + sto = sto.query("country_code not in @remove_country") # production sites inside the model scope prod = build_gem_prod_data(gem_fn) diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index 77768404..b983f125 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -34,7 +34,7 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) - I = cutout.indicatormatrix(clustered_regions) + I = cutout.indicatormatrix(clustered_regions) # noqa: E741 pop_layout = xr.open_dataarray(snakemake.input.pop_layout) diff --git a/scripts/build_industrial_distribution_key.py b/scripts/build_industrial_distribution_key.py index 08de3bae..fe7cf0c1 100644 --- a/scripts/build_industrial_distribution_key.py +++ b/scripts/build_industrial_distribution_key.py @@ -30,7 +30,7 @@ def locate_missing_industrial_sites(df): try: from geopy.extra.rate_limiter import 
RateLimiter from geopy.geocoders import Nominatim - except: + except ImportError: raise ModuleNotFoundError( "Optional dependency 'geopy' not found." "Install via 'conda install -c conda-forge geopy'" @@ -99,7 +99,7 @@ def prepare_hotmaps_database(regions): # get all duplicated entries duplicated_i = gdf.index[gdf.index.duplicated()] # convert from raw data country name to iso-2-code - code = cc.convert(gdf.loc[duplicated_i, "Country"], to="iso2") + code = cc.convert(gdf.loc[duplicated_i, "Country"], to="iso2") # noqa: F841 # screen out malformed country allocation gdf_filtered = gdf.loc[duplicated_i].query("country == @code") # concat not duplicated and filtered gdf diff --git a/scripts/build_line_rating.py b/scripts/build_line_rating.py index 4d45b910..5b4cd6b3 100755 --- a/scripts/build_line_rating.py +++ b/scripts/build_line_rating.py @@ -98,7 +98,7 @@ def calculate_line_rating(n, cutout): ------- xarray DataArray object with maximal power. """ - relevant_lines = n.lines[(n.lines["underground"] == False)] + relevant_lines = n.lines[~n.lines["underground"]] buses = relevant_lines[["bus0", "bus1"]].values x = n.buses.x y = n.buses.y diff --git a/scripts/build_population_layouts.py b/scripts/build_population_layouts.py index 3e2b77d4..e215e6c0 100644 --- a/scripts/build_population_layouts.py +++ b/scripts/build_population_layouts.py @@ -33,7 +33,7 @@ if __name__ == "__main__": nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index") # Indicator matrix NUTS3 -> grid cells - I = atlite.cutout.compute_indicatormatrix(nuts3.geometry, grid_cells) + I = atlite.cutout.compute_indicatormatrix(nuts3.geometry, grid_cells) # noqa: E741 # Indicator matrix grid_cells -> NUTS3; inprinciple Iinv*I is identity # but imprecisions mean not perfect diff --git a/scripts/build_retro_cost.py b/scripts/build_retro_cost.py index d2aae140..67440263 100755 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -554,7 +554,7 @@ def prepare_temperature_data(): # 
windows --------------------------------------------------------------- -def window_limit(l, window_assumptions): +def window_limit(l, window_assumptions): # noqa: E741 """ Define limit u value from which on window is retrofitted. """ @@ -567,7 +567,7 @@ def window_limit(l, window_assumptions): return m * l + a -def u_retro_window(l, window_assumptions): +def u_retro_window(l, window_assumptions): # noqa: E741 """ Define retrofitting value depending on renovation strength. """ @@ -580,7 +580,7 @@ def u_retro_window(l, window_assumptions): return max(m * l + a, 0.8) -def window_cost(u, cost_retro, window_assumptions): +def window_cost(u, cost_retro, window_assumptions): # noqa: E741 """ Get costs for new windows depending on u value. """ @@ -600,7 +600,7 @@ def window_cost(u, cost_retro, window_assumptions): return window_cost -def calculate_costs(u_values, l, cost_retro, window_assumptions): +def calculate_costs(u_values, l, cost_retro, window_assumptions): # noqa: E741 """ Returns costs for a given retrofitting strength weighted by the average surface/volume ratio of the component for each building type. @@ -626,7 +626,7 @@ def calculate_costs(u_values, l, cost_retro, window_assumptions): ) -def calculate_new_u(u_values, l, l_weight, window_assumptions, k=0.035): +def calculate_new_u(u_values, l, l_weight, window_assumptions, k=0.035): # noqa: E741 """ Calculate U-values after building retrofitting, depending on the old U-values (u_values). 
This is for simple insulation measuers, adding an @@ -746,7 +746,7 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor) """ # (1) by transmission # calculate new U values of building elements due to additional insulation - for l in l_strength: + for l in l_strength: # noqa: E741 u_values[f"new_U_{l}"] = calculate_new_u( u_values, l, l_weight, window_assumptions ) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index a13ec3c2..02fa4a71 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -34,7 +34,7 @@ if __name__ == "__main__": gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) - I = cutout.indicatormatrix(clustered_regions) + I = cutout.indicatormatrix(clustered_regions) # noqa: E741 pop_layout = xr.open_dataarray(snakemake.input.pop_layout) diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py index 28f08396..01af29aa 100644 --- a/scripts/cluster_network.py +++ b/scripts/cluster_network.py @@ -133,6 +133,7 @@ import pyomo.environ as po import pypsa import seaborn as sns from _helpers import configure_logging, update_p_nom_max +from add_electricity import load_costs from pypsa.clustering.spatial import ( busmap_by_greedy_modularity, busmap_by_hac, @@ -141,11 +142,7 @@ from pypsa.clustering.spatial import ( ) warnings.filterwarnings(action="ignore", category=UserWarning) - -from add_electricity import load_costs - idx = pd.IndexSlice - logger = logging.getLogger(__name__) diff --git a/scripts/make_summary.py b/scripts/make_summary.py index 7f08b678..0fab5367 100644 --- a/scripts/make_summary.py +++ b/scripts/make_summary.py @@ -506,10 +506,6 @@ def calculate_weighted_prices(n, label, weighted_prices): if carrier in ["H2", "gas"]: load = pd.DataFrame(index=n.snapshots, columns=buses, data=0.0) - elif carrier[:5] == "space": - load = heat_demand_df[buses.str[:2]].rename( - columns=lambda i: str(i) + suffix - ) 
else: load = n.loads_t.p_set[buses] diff --git a/scripts/plot_network.py b/scripts/plot_network.py index d8c17587..6a3783e7 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -895,8 +895,7 @@ def plot_series(network, carrier="AC", name="test"): fig.tight_layout() fig.savefig( - "{}/{RDIR}maps/series-{}-{}-{}-{}-{}.pdf".format( - "results", + "results/{}maps/series-{}-{}-{}-{}.pdf".format( snakemake.params.RDIR, snakemake.wildcards["ll"], carrier, diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 7e2c955d..b2ec0892 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -11,12 +11,11 @@ import logging import matplotlib.gridspec as gridspec import matplotlib.pyplot as plt import pandas as pd +from prepare_sector_network import co2_emissions_year logger = logging.getLogger(__name__) plt.style.use("ggplot") -from prepare_sector_network import co2_emissions_year - # consolidate and rename def rename_techs(label): diff --git a/scripts/prepare_network.py b/scripts/prepare_network.py index 75e5daba..5652dc6e 100755 --- a/scripts/prepare_network.py +++ b/scripts/prepare_network.py @@ -194,7 +194,7 @@ def apply_time_segmentation(n, segments, solver_name="cbc"): logger.info(f"Aggregating time series to {segments} segments.") try: import tsam.timeseriesaggregation as tsam - except: + except ImportError: raise ModuleNotFoundError( "Optional dependency 'tsam' not found." 
"Install via 'pip install tsam'" ) diff --git a/scripts/prepare_perfect_foresight.py b/scripts/prepare_perfect_foresight.py index 00f23fab..1c3a0ebe 100644 --- a/scripts/prepare_perfect_foresight.py +++ b/scripts/prepare_perfect_foresight.py @@ -305,7 +305,7 @@ def set_carbon_constraints(n, opts): m = re.match(r"^\d+p\d$", o, re.IGNORECASE) if m is not None: budget = snakemake.config["co2_budget"][m.group(0)] * 1e9 - if budget != None: + if budget is not None: logger.info(f"add carbon budget of {budget}") n.add( "GlobalConstraint", @@ -428,7 +428,7 @@ def apply_time_segmentation_perfect( """ try: import tsam.timeseriesaggregation as tsam - except: + except ImportError: raise ModuleNotFoundError( "Optional dependency 'tsam' not found." "Install via 'pip install tsam'" ) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 21291d7e..4d36e7d4 100755 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -183,8 +183,6 @@ def define_spatial(nodes, options): return spatial -from types import SimpleNamespace - spatial = SimpleNamespace() @@ -1472,7 +1470,6 @@ def add_land_transport(n, costs): # TODO options? logger.info("Add land transport") - nhours = n.snapshot_weightings.generators.sum() transport = pd.read_csv( snakemake.input.transport_demand, index_col=0, parse_dates=True @@ -3120,6 +3117,7 @@ def add_waste_heat(n): # TODO options? logger.info("Add possibility to use industrial waste heat in district heating") + cf_industry = snakemake.params.industry # AC buses with district heating urban_central = n.buses.index[n.buses.carrier == "urban central heat"] @@ -3480,7 +3478,7 @@ def apply_time_segmentation( """ try: import tsam.timeseriesaggregation as tsam - except: + except ImportError: raise ModuleNotFoundError( "Optional dependency 'tsam' not found." 
"Install via 'pip install tsam'" ) diff --git a/scripts/retrieve_monthly_fuel_prices.py b/scripts/retrieve_monthly_fuel_prices.py index 11e351ce..e64066cb 100644 --- a/scripts/retrieve_monthly_fuel_prices.py +++ b/scripts/retrieve_monthly_fuel_prices.py @@ -7,13 +7,12 @@ Retrieve monthly fuel prices from Destatis. """ import logging - -logger = logging.getLogger(__name__) - from pathlib import Path from _helpers import configure_logging, progress_retrieve +logger = logging.getLogger(__name__) + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 4a1e27f9..1c37bfd2 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -572,7 +572,7 @@ def add_SAFE_constraints(n, config): peakdemand = n.loads_t.p_set.sum(axis=1).max() margin = 1.0 + config["electricity"]["SAFE_reservemargin"] reserve_margin = peakdemand * margin - conventional_carriers = config["electricity"]["conventional_carriers"] + conventional_carriers = config["electricity"]["conventional_carriers"] # noqa: F841 ext_gens_i = n.generators.query( "carrier in @conventional_carriers & p_nom_extendable" ).index @@ -694,7 +694,7 @@ def add_lossy_bidirectional_link_constraints(n): return n.links["reversed"] = n.links.reversed.fillna(0).astype(bool) - carriers = n.links.loc[n.links.reversed, "carrier"].unique() + carriers = n.links.loc[n.links.reversed, "carrier"].unique() # noqa: F841 forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" From 1ffb28b3fc4661c60d5a0a314cd8a94fb7a4268e Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 19 Jan 2024 12:23:29 +0100 Subject: [PATCH 4/5] helpers: parse: fix docstring and take original implementation --- scripts/_helpers.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/_helpers.py b/scripts/_helpers.py index d84f025b..3173671d 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py 
@@ -362,7 +362,8 @@ def generate_periodic_profiles(dt_index, nodes, weekly_profile, localize=None): def parse(infix): """ - Recursively parse a list into a dictionary or a YAML object. + Recursively parse a chained wildcard expression into a dictionary or a YAML + object. Parameters ---------- @@ -377,7 +378,7 @@ def parse(infix): if len(infix) == 1: return yaml.safe_load(infix[0]) else: - return {infix[0]: parse(infix[1:])} + return {infix.pop(0): parse(infix)} def update_config_with_sector_opts(config, sector_opts): From d0cb57e9ab743d7099ccfbea2266788a9b02cb97 Mon Sep 17 00:00:00 2001 From: Fabian Date: Fri, 19 Jan 2024 12:37:07 +0100 Subject: [PATCH 5/5] make_summary_perfect: reinsert calculate_<> functions --- scripts/build_gas_input_locations.py | 4 ++-- scripts/make_summary_perfect.py | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index d8727063..081f74b9 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -28,10 +28,10 @@ def build_gem_lng_data(fn): df = df.set_index("ComboID") remove_country = ["Cyprus", "Turkey"] # noqa: F841 - remove_terminal = [ + remove_terminal = [ # noqa: F841 "Puerto de la Luz LNG Terminal", "Gran Canaria LNG Terminal", - ] # noqa: F841 + ] df = df.query( "Status != 'Cancelled' \ diff --git a/scripts/make_summary_perfect.py b/scripts/make_summary_perfect.py index 93411a4c..555e5da5 100644 --- a/scripts/make_summary_perfect.py +++ b/scripts/make_summary_perfect.py @@ -12,6 +12,9 @@ other metrics. import numpy as np import pandas as pd import pypsa +from make_summary import calculate_cfs # noqa: F401 +from make_summary import calculate_nodal_cfs # noqa: F401 +from make_summary import calculate_nodal_costs # noqa: F401 from make_summary import assign_carriers, assign_locations from prepare_sector_network import prepare_costs from pypsa.descriptors import get_active_assets