scripts: apply linting fixes
commit 5cf2b8174a
parent a346c9994b
@@ -18,7 +18,6 @@
 # serve to show the default.

 import os
 import shlex
 import sys

 # If extensions (or modules to document with autodoc) are in another directory,
@@ -13,7 +13,6 @@ idx = pd.IndexSlice

 import numpy as np
 import pypsa
 import yaml
 from _helpers import override_component_attrs, update_config_with_sector_opts
 from add_existing_baseyear import add_build_year_to_new_assets

@@ -606,7 +606,7 @@ def attach_extendable_generators(n, costs, ppl, carriers):

         else:
             raise NotImplementedError(
-                f"Adding extendable generators for carrier "
+                "Adding extendable generators for carrier "
                 "'{tech}' is not implemented, yet. "
                 "Only OCGT, CCGT and nuclear are allowed at the moment."
             )
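The change above is the cleanup flake8 reports as F541 (f-string without any placeholders): the first fragment of the concatenated message interpolates nothing, so its f prefix is dropped. A minimal sketch of the rule, not a verbatim reproduction of the hunk, with a made-up value:

name = "OCGT"  # illustrative value only, not taken from the script
redundant = f"Only OCGT, CCGT and nuclear are allowed at the moment."  # F541: no {...} fields
plain = "Only OCGT, CCGT and nuclear are allowed at the moment."       # preferred form
interpolating = f"Adding extendable generators for carrier '{name}'."  # a real placeholder keeps the prefix
assert redundant == plain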
@@ -16,7 +16,6 @@ from types import SimpleNamespace
 import numpy as np
 import pypsa
 import xarray as xr
 import yaml
 from _helpers import override_component_attrs, update_config_with_sector_opts
 from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs

@@ -43,7 +43,6 @@ Description
 """

 import logging
 import os

 import geopandas as gpd
 import numpy as np
@@ -15,7 +15,6 @@ logger = logging.getLogger(__name__)
 import geopandas as gpd
 import pandas as pd
 from cluster_gas_network import load_bus_regions
 from shapely import wkt


 def read_scigrid_gas(fn):
@@ -77,8 +77,10 @@ def load_timeseries(fn, years, countries, powerstatistics=True):

     pattern = "power_statistics" if powerstatistics else "transparency"
     pattern = f"_load_actual_entsoe_{pattern}"
-    rename = lambda s: s[: -len(pattern)]
-    date_parser = lambda x: dateutil.parser.parse(x, ignoretz=True)
+    def rename(s):
+        return s[:-len(pattern)]
+    def date_parser(x):
+        return dateutil.parser.parse(x, ignoretz=True)
     return (
         pd.read_csv(fn, index_col=0, parse_dates=[0], date_parser=date_parser)
         .filter(like=pattern)
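Here two lambdas bound to names are rewritten as def statements, the PEP 8 recommendation that flake8 enforces as E731; a named function also reports its own name in tracebacks and can carry a docstring. A small self-contained sketch of the rename helper, where the sample column name is invented rather than taken from the dataset:

pattern = "_load_actual_entsoe_power_statistics"  # example suffix in the shape the script builds

# before: lambda assigned to a name (flagged by flake8 E731)
rename_old = lambda s: s[: -len(pattern)]

# after: equivalent def with a real __name__
def rename(s):
    return s[: -len(pattern)]

column = "FR" + pattern  # hypothetical column name for illustration
assert rename(column) == rename_old(column) == "FR"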
@@ -11,7 +11,6 @@ import logging

 logger = logging.getLogger(__name__)

 import multiprocessing as mp

 import atlite
 import geopandas as gpd
@@ -12,7 +12,6 @@ import sys
 import numpy as np
 import pandas as pd
 import pypsa
 import yaml
 from _helpers import override_component_attrs
 from prepare_sector_network import prepare_costs

@@ -708,5 +707,5 @@ if __name__ == "__main__":
     if snakemake.config["foresight"] == "myopic":
         cumulative_cost = calculate_cumulative_cost()
         cumulative_cost.to_csv(
-            f"results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv"
+            "results/" + snakemake.params.RDIR + "/csvs/cumulative_cost.csv"
         )
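The same F541 pattern on a path built by concatenation: the f prefix on "results/" interpolates nothing, so removing it leaves the written path unchanged. A quick check with an invented run directory standing in for snakemake.params.RDIR:

RDIR = "test-run"  # stand-in for snakemake.params.RDIR
assert (
    f"results/" + RDIR + "/csvs/cumulative_cost.csv"
    == "results/" + RDIR + "/csvs/cumulative_cost.csv"
)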
@@ -741,9 +741,8 @@ def prepare_costs(cost_file, config, Nyears):

     costs = costs.fillna(config["fill_values"])

-    annuity_factor = (
-        lambda v: annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100
-    )
+    def annuity_factor(v):
+        return annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100
     costs["fixed"] = [
         annuity_factor(v) * v["investment"] * Nyears for i, v in costs.iterrows()
     ]
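This is the same lambda-to-def rewrite (E731) applied to the annuity factor; the list comprehension that consumes it is untouched. A toy, self-contained version showing that the def form is a drop-in replacement at the call site; the annuity helper and the cost table below are stand-ins, not the script's data:

import pandas as pd


def annuity(lifetime, discount_rate):
    # stand-in for the script's annuity helper (simple capital recovery factor)
    return discount_rate / (1.0 - 1.0 / (1.0 + discount_rate) ** lifetime)


def annuity_factor(v):
    return annuity(v["lifetime"], v["discount rate"]) + v["FOM"] / 100


costs = pd.DataFrame(
    {
        "lifetime": [25, 40],
        "discount rate": [0.07, 0.07],
        "FOM": [2.0, 3.0],
        "investment": [800.0, 1500.0],
    },
    index=["solar", "onwind"],
)
Nyears = 1.0
costs["fixed"] = [
    annuity_factor(v) * v["investment"] * Nyears for i, v in costs.iterrows()
]
print(costs["fixed"])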
@@ -3343,7 +3342,7 @@ if __name__ == "__main__":
     limit_type = "config"
     limit = get(snakemake.config["co2_budget"], investment_year)
     for o in opts:
-        if not "cb" in o:
+        if "cb" not in o:
             continue
         limit_type = "carbon budget"
         fn = "results/" + snakemake.params.RDIR + "/csvs/carbon_budget_distribution.csv"
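The condition is rewritten from `not "cb" in o` to `"cb" not in o`, the membership-test style flake8 reports as E713; both spellings perform the same test, so behaviour is unchanged. The same fix recurs below for the Co2L and seq options. A one-line check with invented wildcard options:

opts = ["cb40ex0", "Co2L0.05", "seq200"]  # made-up option strings for illustration
assert all((not "cb" in o) == ("cb" not in o) for o in opts)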
@@ -3357,7 +3356,7 @@ if __name__ == "__main__":
         limit = co2_cap.loc[investment_year]
         break
     for o in opts:
-        if not "Co2L" in o:
+        if "Co2L" not in o:
             continue
         limit_type = "wildcard"
         limit = o[o.find("Co2L") + 4 :]
@@ -66,7 +66,7 @@ if __name__ == "__main__":
     logger.info(f"Downloading databundle from '{url}'.")
     progress_retrieve(url, tarball_fn)

-    logger.info(f"Extracting databundle.")
+    logger.info("Extracting databundle.")
     tarfile.open(tarball_fn).extractall(to_fn)

     tarball_fn.unlink()
@@ -34,7 +34,7 @@ if __name__ == "__main__":
     logger.info(f"Downloading databundle from '{url}'.")
     progress_retrieve(url, zip_fn)

-    logger.info(f"Extracting databundle.")
+    logger.info("Extracting databundle.")
     zipfile.ZipFile(zip_fn).extractall(to_fn)

     zip_fn.unlink()
@@ -31,7 +31,7 @@ if __name__ == "__main__":
     logger.info(f"Downloading databundle from '{url}'.")
     progress_retrieve(url, tarball_fn)

-    logger.info(f"Extracting databundle.")
+    logger.info("Extracting databundle.")
     tarfile.open(tarball_fn).extractall(to_fn)

     tarball_fn.unlink()
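The three retrieve-script hunks above are the same F541 cleanup applied to a log message with no placeholders; the emitted log record is identical either way. A minimal sketch:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

logger.info(f"Extracting databundle.")  # F541: the f prefix interpolates nothing
logger.info("Extracting databundle.")   # identical log record without it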
@@ -99,7 +99,7 @@ def add_co2_sequestration_limit(n, limit=200):

     limit = limit * 1e6
     for o in opts:
-        if not "seq" in o:
+        if "seq" not in o:
             continue
         limit = float(o[o.find("seq") + 3 :]) * 1e6
         break