refactor and fix remaining linting problems
parent b1d21813af
commit 025688bf70
@@ -360,8 +360,24 @@ def generate_periodic_profiles(dt_index, nodes, weekly_profile, localize=None):
     return week_df


-def parse(l):
-    return yaml.safe_load(l[0]) if len(l) == 1 else {l.pop(0): parse(l)}
+def parse(infix):
+    """
+    Recursively parse a list into a dictionary or a YAML object.
+
+    Parameters
+    ----------
+    infix : list
+        The list to parse.
+
+    Returns
+    -------
+    dict or YAML object
+        The parsed list.
+    """
+    if len(infix) == 1:
+        return yaml.safe_load(infix[0])
+    else:
+        return {infix[0]: parse(infix[1:])}


 def update_config_with_sector_opts(config, sector_opts):
@@ -369,8 +385,8 @@ def update_config_with_sector_opts(config, sector_opts):

     for o in sector_opts.split("-"):
         if o.startswith("CF+"):
-            l = o.split("+")[1:]
-            update_config(config, parse(l))
+            infix = o.split("+")[1:]
+            update_config(config, parse(infix))


 def get_checksum_from_zenodo(file_url):
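For orientation: the rewritten parse folds a list of option tokens into a nested dictionary, YAML-parsing the final token so numbers and booleans keep their types; unlike the old one-liner, it no longer mutates its argument via l.pop(0). A minimal standalone sketch, with a hypothetical token list:

import yaml


def parse(infix):
    # Fold ["a", "b", "0.5"] into {"a": {"b": 0.5}}; the last token is
    # YAML-parsed so "0.5" becomes a float and "true" a bool.
    if len(infix) == 1:
        return yaml.safe_load(infix[0])
    else:
        return {infix[0]: parse(infix[1:])}


# e.g. a sector option "CF+sector+co2+0.5" yields these tokens after "CF+":
print(parse(["sector", "co2", "0.5"]))  # {'sector': {'co2': 0.5}}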
@@ -294,10 +294,10 @@ def attach_load(n, regions, load, nuts3_shapes, ua_md_gdp, countries, scaling=1.
     nuts3 = gpd.read_file(nuts3_shapes).set_index("index")

     def upsample(cntry, group):
-        l = opsd_load[cntry]
+        load = opsd_load[cntry]

         if len(group) == 1:
-            return pd.DataFrame({group.index[0]: l})
+            return pd.DataFrame({group.index[0]: load})
         nuts3_cntry = nuts3.loc[nuts3.country == cntry]
         transfer = shapes_to_shapes(group, nuts3_cntry.geometry).T.tocsr()
         gdp_n = pd.Series(
@@ -314,8 +314,8 @@ def attach_load(n, regions, load, nuts3_shapes, ua_md_gdp, countries, scaling=1.
         # overwrite factor because nuts3 provides no data for UA+MD
         factors = normed(ua_md_gdp.loc[group.index, "GDP_PPP"].squeeze())
         return pd.DataFrame(
-            factors.values * l.values[:, np.newaxis],
-            index=l.index,
+            factors.values * load.values[:, np.newaxis],
+            index=load.index,
             columns=factors.index,
         )
@@ -622,7 +622,7 @@ def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **par
             hydro.max_hours > 0, hydro.country.map(max_hours_country)
         ).fillna(6)

-        if flatten_dispatch := params.get("flatten_dispatch", False):
+        if params.get("flatten_dispatch", False):
            buffer = params.get("flatten_dispatch_buffer", 0.2)
            average_capacity_factor = inflow_t[hydro.index].mean() / hydro["p_nom"]
            p_max_pu = (average_capacity_factor + buffer).clip(upper=1)
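The walrus assignment bound flatten_dispatch without ever reading it again (the pattern ruff flags as F841); testing the value directly is equivalent. For intuition, a toy sketch of the capped-dispatch arithmetic with made-up inflow and capacity numbers:

import pandas as pd

# Hypothetical stand-ins for inflow_t[hydro.index] and hydro["p_nom"].
inflow_t = pd.DataFrame({"plant_a": [40.0, 60.0, 50.0]})
p_nom = pd.Series({"plant_a": 100.0})

buffer = 0.2  # flatten_dispatch_buffer
average_capacity_factor = inflow_t.mean() / p_nom
p_max_pu = (average_capacity_factor + buffer).clip(upper=1)
print(p_max_pu["plant_a"])  # 0.7: dispatch capped slightly above mean inflow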
@@ -80,4 +80,9 @@ def build_biomass_transport_costs():


-build_biomass_transport_costs()
+if __name__ == "__main__":
+    if "snakemake" not in globals():
+        from _helpers import mock_snakemake
+
+        snakemake = mock_snakemake("build_biomass_transport_costs")
+
+    build_biomass_transport_costs()
@@ -28,7 +28,7 @@ if __name__ == "__main__":
         gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
     )

-    I = cutout.indicatormatrix(clustered_regions)
+    I = cutout.indicatormatrix(clustered_regions)  # noqa: E741

     pop = {}
     for item in ["total", "urban", "rural"]:
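Several hunks in this commit suppress rather than fix E741, which flags single-character names that are easily misread. Here the suppression keeps the established name for the sparse indicator matrix. A generic illustration of what the rule catches:

# E741 fires on ambiguous single-letter bindings such as these:
I = [[1, 0], [0, 1]]  # noqa: E741  ("I" reads like "l" or "1" in many fonts)
l = [0.07, 0.075]  # noqa: E741  ("l" reads like "1" or "I")
print(I, l)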
@@ -27,8 +27,11 @@ def build_gem_lng_data(fn):
     df = pd.read_excel(fn[0], sheet_name="LNG terminals - data")
     df = df.set_index("ComboID")

-    remove_country = ["Cyprus", "Turkey"]
-    remove_terminal = ["Puerto de la Luz LNG Terminal", "Gran Canaria LNG Terminal"]
+    remove_country = ["Cyprus", "Turkey"]  # noqa: F841
+    remove_terminal = [
+        "Puerto de la Luz LNG Terminal",
+        "Gran Canaria LNG Terminal",
+    ]  # noqa: F841

     df = df.query(
         "Status != 'Cancelled' \
@@ -45,8 +48,8 @@ def build_gem_prod_data(fn):
     df = pd.read_excel(fn[0], sheet_name="Gas extraction - main")
     df = df.set_index("GEM Unit ID")

-    remove_country = ["Cyprus", "Türkiye"]
-    remove_fuel_type = ["oil"]
+    remove_country = ["Cyprus", "Türkiye"]  # noqa: F841
+    remove_fuel_type = ["oil"]  # noqa: F841

     df = df.query(
         "Status != 'shut in' \
@@ -96,8 +99,8 @@ def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries):
     ]

     sto = read_scigrid_gas(sto_fn)
-    remove_country = ["RU", "UA", "TR", "BY"]
-    sto = sto.query("country_code != @remove_country")
+    remove_country = ["RU", "UA", "TR", "BY"]  # noqa: F841
+    sto = sto.query("country_code not in @remove_country")

     # production sites inside the model scope
     prod = build_gem_prod_data(gem_fn)
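pandas special-cases != against a list inside query to mean element-wise exclusion, but the explicit not in states the membership test unambiguously, which is what this hunk switches to. A self-contained sketch with toy country codes:

import pandas as pd

sto = pd.DataFrame({"country_code": ["DE", "RU", "FR", "UA"]})
remove_country = ["RU", "UA", "TR", "BY"]

# "@" pulls the local list into the query's evaluation scope.
kept = sto.query("country_code not in @remove_country")
print(kept["country_code"].tolist())  # ['DE', 'FR']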
@@ -34,7 +34,7 @@ if __name__ == "__main__":
         gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
     )

-    I = cutout.indicatormatrix(clustered_regions)
+    I = cutout.indicatormatrix(clustered_regions)  # noqa: E741

     pop_layout = xr.open_dataarray(snakemake.input.pop_layout)

@@ -30,7 +30,7 @@ def locate_missing_industrial_sites(df):
     try:
         from geopy.extra.rate_limiter import RateLimiter
         from geopy.geocoders import Nominatim
-    except:
+    except ImportError:
         raise ModuleNotFoundError(
             "Optional dependency 'geopy' not found."
             "Install via 'conda install -c conda-forge geopy'"
@@ -99,7 +99,7 @@ def prepare_hotmaps_database(regions):
     # get all duplicated entries
     duplicated_i = gdf.index[gdf.index.duplicated()]
     # convert from raw data country name to iso-2-code
-    code = cc.convert(gdf.loc[duplicated_i, "Country"], to="iso2")
+    code = cc.convert(gdf.loc[duplicated_i, "Country"], to="iso2")  # noqa: F841
     # screen out malformed country allocation
     gdf_filtered = gdf.loc[duplicated_i].query("country == @code")
     # concat not duplicated and filtered gdf
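A bare except: (ruff E722) would also swallow KeyboardInterrupt, SystemExit, and unrelated bugs raised during import; narrowing to ImportError keeps only the intended failure path. The same fix recurs below for the optional tsam dependency. The pattern in isolation, with a deliberately missing hypothetical module standing in for geopy:

try:
    import some_missing_optional_dependency  # hypothetical module name
except ImportError as e:
    # Only import failures land here; Ctrl-C and genuine bugs propagate.
    print(f"optional dependency missing: {e}")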
@@ -98,7 +98,7 @@ def calculate_line_rating(n, cutout):
     -------
     xarray DataArray object with maximal power.
     """
-    relevant_lines = n.lines[(n.lines["underground"] == False)]
+    relevant_lines = n.lines[~n.lines["underground"]]
     buses = relevant_lines[["bus0", "bus1"]].values
     x = n.buses.x
     y = n.buses.y
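E712 flags equality comparison against boolean literals; negating the boolean column directly is the idiomatic mask. A quick equivalence check on toy data:

import pandas as pd

lines = pd.DataFrame({"underground": [True, False, False]})

overhead_eq = lines[lines["underground"] == False]  # noqa: E712  (old style)
overhead_neg = lines[~lines["underground"]]  # idiomatic boolean mask

assert overhead_eq.equals(overhead_neg)
print(len(overhead_neg))  # 2 overhead lines selected either way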
@@ -33,7 +33,7 @@ if __name__ == "__main__":
     nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index("index")

     # Indicator matrix NUTS3 -> grid cells
-    I = atlite.cutout.compute_indicatormatrix(nuts3.geometry, grid_cells)
+    I = atlite.cutout.compute_indicatormatrix(nuts3.geometry, grid_cells)  # noqa: E741

     # Indicator matrix grid_cells -> NUTS3; in principle Iinv*I is identity
     # but imprecisions mean not perfect
@@ -554,7 +554,7 @@ def prepare_temperature_data():


 # windows ---------------------------------------------------------------
-def window_limit(l, window_assumptions):
+def window_limit(l, window_assumptions):  # noqa: E741
     """
     Define limit u value from which on window is retrofitted.
     """
@@ -567,7 +567,7 @@ def window_limit(l, window_assumptions):
     return m * l + a


-def u_retro_window(l, window_assumptions):
+def u_retro_window(l, window_assumptions):  # noqa: E741
     """
     Define retrofitting value depending on renovation strength.
     """
@@ -580,7 +580,7 @@ def u_retro_window(l, window_assumptions):
     return max(m * l + a, 0.8)


-def window_cost(u, cost_retro, window_assumptions):
+def window_cost(u, cost_retro, window_assumptions):  # noqa: E741
     """
     Get costs for new windows depending on u value.
     """
@@ -600,7 +600,7 @@ def window_cost(u, cost_retro, window_assumptions):
     return window_cost


-def calculate_costs(u_values, l, cost_retro, window_assumptions):
+def calculate_costs(u_values, l, cost_retro, window_assumptions):  # noqa: E741
     """
     Returns costs for a given retrofitting strength weighted by the average
     surface/volume ratio of the component for each building type.
@@ -626,7 +626,7 @@ def calculate_costs(u_values, l, cost_retro, window_assumptions):
     )


-def calculate_new_u(u_values, l, l_weight, window_assumptions, k=0.035):
+def calculate_new_u(u_values, l, l_weight, window_assumptions, k=0.035):  # noqa: E741
     """
     Calculate U-values after building retrofitting, depending on the old
     U-values (u_values). This is for simple insulation measures, adding an
@@ -746,7 +746,7 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor)
     """
     # (1) by transmission
     # calculate new U values of building elements due to additional insulation
-    for l in l_strength:
+    for l in l_strength:  # noqa: E741
         u_values[f"new_U_{l}"] = calculate_new_u(
             u_values, l, l_weight, window_assumptions
         )
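An alternative to this file's repeated suppressions would be renaming the ambiguous l (the retrofitting strength) once per signature. A hypothetical sketch, with the name strength and the sample values being illustrative only:

l_strength = ["0.07", "0.075"]  # example retrofitting strengths

for strength in l_strength:  # no E741: the name is unambiguous
    print(f"new_U_{strength}")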
@@ -34,7 +34,7 @@ if __name__ == "__main__":
         gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0)
     )

-    I = cutout.indicatormatrix(clustered_regions)
+    I = cutout.indicatormatrix(clustered_regions)  # noqa: E741

     pop_layout = xr.open_dataarray(snakemake.input.pop_layout)

@@ -133,6 +133,7 @@ import pyomo.environ as po
 import pypsa
 import seaborn as sns
 from _helpers import configure_logging, update_p_nom_max
+from add_electricity import load_costs
 from pypsa.clustering.spatial import (
     busmap_by_greedy_modularity,
     busmap_by_hac,
@@ -141,11 +142,7 @@ from pypsa.clustering.spatial import (
 )

 warnings.filterwarnings(action="ignore", category=UserWarning)

-from add_electricity import load_costs
-
 idx = pd.IndexSlice

 logger = logging.getLogger(__name__)

@@ -506,10 +506,6 @@ def calculate_weighted_prices(n, label, weighted_prices):

         if carrier in ["H2", "gas"]:
             load = pd.DataFrame(index=n.snapshots, columns=buses, data=0.0)
-        elif carrier[:5] == "space":
-            load = heat_demand_df[buses.str[:2]].rename(
-                columns=lambda i: str(i) + suffix
-            )
         else:
             load = n.loads_t.p_set[buses]

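The deleted elif branch referenced heat_demand_df, which appears undefined in this scope (the kind of issue ruff reports as F821), so it could only have raised NameError if reached; removing it leaves the two well-defined cases. For reference, the zero-demand frame built for the H2/gas case looks like this, with illustrative snapshots and bus names:

import pandas as pd

snapshots = pd.date_range("2013-01-01", periods=3)
buses = pd.Index(["DE0 0 H2", "FR0 0 H2"])  # illustrative bus names

load = pd.DataFrame(index=snapshots, columns=buses, data=0.0)
print(load.sum().sum())  # 0.0: the H2/gas weighting starts from zero demand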
@@ -895,8 +895,7 @@ def plot_series(network, carrier="AC", name="test"):
     fig.tight_layout()

     fig.savefig(
-        "{}/{RDIR}maps/series-{}-{}-{}-{}-{}.pdf".format(
-            "results",
+        "results/{}maps/series-{}-{}-{}-{}.pdf".format(
             snakemake.params.RDIR,
             snakemake.wildcards["ll"],
             carrier,
@@ -11,12 +11,11 @@ import logging
 import matplotlib.gridspec as gridspec
 import matplotlib.pyplot as plt
 import pandas as pd
+from prepare_sector_network import co2_emissions_year

 logger = logging.getLogger(__name__)
 plt.style.use("ggplot")

-from prepare_sector_network import co2_emissions_year
-

 # consolidate and rename
 def rename_techs(label):
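Both here and in the clustering-script hunk above, imports were stranded below executable statements, which ruff reports as E402 (module level import not at top of file). A minimal sketch of the E402-clean layout, using generic imports:

# All imports first, then module-level statements.
import logging

import matplotlib.pyplot as plt

logger = logging.getLogger(__name__)
plt.style.use("ggplot")  # executable code only after the import block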
@@ -194,7 +194,7 @@ def apply_time_segmentation(n, segments, solver_name="cbc"):
     logger.info(f"Aggregating time series to {segments} segments.")
     try:
         import tsam.timeseriesaggregation as tsam
-    except:
+    except ImportError:
         raise ModuleNotFoundError(
             "Optional dependency 'tsam' not found." "Install via 'pip install tsam'"
         )
@@ -305,7 +305,7 @@ def set_carbon_constraints(n, opts):
         m = re.match(r"^\d+p\d$", o, re.IGNORECASE)
         if m is not None:
             budget = snakemake.config["co2_budget"][m.group(0)] * 1e9
-    if budget != None:
+    if budget is not None:
         logger.info(f"add carbon budget of {budget}")
         n.add(
             "GlobalConstraint",
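E711: comparisons to None should use identity, since == can be overridden by the operands (pandas objects, for instance, broadcast it element-wise). The minimal form, with an illustrative budget value:

budget = 25.7 * 1e9  # illustrative carbon budget in tCO2

if budget is not None:  # identity test; "!= None" is flagged as E711
    print(f"add carbon budget of {budget}")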
@@ -428,7 +428,7 @@ def apply_time_segmentation_perfect(
     """
     try:
         import tsam.timeseriesaggregation as tsam
-    except:
+    except ImportError:
         raise ModuleNotFoundError(
             "Optional dependency 'tsam' not found." "Install via 'pip install tsam'"
         )
|
@ -183,8 +183,6 @@ def define_spatial(nodes, options):
|
||||
return spatial
|
||||
|
||||
|
||||
from types import SimpleNamespace
|
||||
|
||||
spatial = SimpleNamespace()
|
||||
|
||||
|
||||
@ -1472,7 +1470,6 @@ def add_land_transport(n, costs):
|
||||
# TODO options?
|
||||
|
||||
logger.info("Add land transport")
|
||||
nhours = n.snapshot_weightings.generators.sum()
|
||||
|
||||
transport = pd.read_csv(
|
||||
snakemake.input.transport_demand, index_col=0, parse_dates=True
|
||||
@ -3120,6 +3117,7 @@ def add_waste_heat(n):
|
||||
# TODO options?
|
||||
|
||||
logger.info("Add possibility to use industrial waste heat in district heating")
|
||||
cf_industry = snakemake.params.industry
|
||||
|
||||
# AC buses with district heating
|
||||
urban_central = n.buses.index[n.buses.carrier == "urban central heat"]
|
||||
@ -3480,7 +3478,7 @@ def apply_time_segmentation(
|
||||
"""
|
||||
try:
|
||||
import tsam.timeseriesaggregation as tsam
|
||||
except:
|
||||
except ImportError:
|
||||
raise ModuleNotFoundError(
|
||||
"Optional dependency 'tsam' not found." "Install via 'pip install tsam'"
|
||||
)
|
||||
|
@@ -7,13 +7,12 @@ Retrieve monthly fuel prices from Destatis.
 """

 import logging
-
-logger = logging.getLogger(__name__)
-
 from pathlib import Path

 from _helpers import configure_logging, progress_retrieve

+logger = logging.getLogger(__name__)
+
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake
@@ -572,7 +572,7 @@ def add_SAFE_constraints(n, config):
     peakdemand = n.loads_t.p_set.sum(axis=1).max()
     margin = 1.0 + config["electricity"]["SAFE_reservemargin"]
     reserve_margin = peakdemand * margin
-    conventional_carriers = config["electricity"]["conventional_carriers"]
+    conventional_carriers = config["electricity"]["conventional_carriers"]  # noqa: F841
     ext_gens_i = n.generators.query(
         "carrier in @conventional_carriers & p_nom_extendable"
     ).index
@@ -694,7 +694,7 @@ def add_lossy_bidirectional_link_constraints(n):
         return

     n.links["reversed"] = n.links.reversed.fillna(0).astype(bool)
-    carriers = n.links.loc[n.links.reversed, "carrier"].unique()
+    carriers = n.links.loc[n.links.reversed, "carrier"].unique()  # noqa: F841

     forward_i = n.links.query(
         "carrier in @carriers and ~reversed and p_nom_extendable"
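The two F841 suppressions above are needed because the variables look unused to static analysis: their only consumers are @-references inside pandas query strings, resolved at runtime. A self-contained sketch of that pattern with toy generator data:

import pandas as pd

generators = pd.DataFrame(
    {
        "carrier": ["coal", "wind", "lignite"],
        "p_nom_extendable": [True, True, False],
    }
)
conventional_carriers = ["coal", "lignite"]  # noqa: F841  (used via "@" below)

ext_gens_i = generators.query(
    "carrier in @conventional_carriers and p_nom_extendable"
).index
print(list(ext_gens_i))  # [0]: only the extendable coal generator matches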