merge helper.py into _helpers.py
commit 541b83b049
parent 939f23f5a4
@@ -5,12 +5,28 @@
 import urllib
 from pathlib import Path

 import pandas as pd
 from tqdm import tqdm
+import contextlib
+import logging
+import os
+import pytz
+import yaml
+from pypsa.components import component_attrs, components
+from pypsa.descriptors import Dict
+from snakemake.utils import update_config
+
+logger = logging.getLogger(__name__)

 REGION_COLS = ["geometry", "name", "x", "y", "country"]
+
+# Define a context manager to temporarily mute print statements
+@contextlib.contextmanager
+def mute_print():
+    with open(os.devnull, "w") as devnull:
+        with contextlib.redirect_stdout(devnull):
+            yield


 def configure_logging(snakemake, skip_handlers=False):
     """
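
Usage sketch (illustrative, not part of the diff): mute_print() silences anything written to stdout inside the with-block; the import assumes the calling script sits next to _helpers.py.

from _helpers import mute_print

with mute_print():
    print("this is swallowed")   # stdout is redirected to os.devnull here
print("this is printed")         # stdout is restored once the block exits
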
@@ -340,3 +356,67 @@ def mock_snakemake(rulename, **wildcards):
     os.chdir(script_dir)
     return snakemake

+
+def override_component_attrs(directory):
+    """Tell PyPSA that links can have multiple outputs by
+    overriding the component_attrs. This can be done for
+    as many buses as you need with format busi for i = 2,3,4,5,....
+    See https://pypsa.org/doc/components.html#link-with-multiple-outputs-or-inputs
+
+    Parameters
+    ----------
+    directory : string
+        Folder where component attributes to override are stored
+        analogous to ``pypsa/component_attrs``, e.g. `links.csv`.
+
+    Returns
+    -------
+    Dictionary of overridden component attributes.
+    """
+
+    attrs = Dict({k: v.copy() for k, v in component_attrs.items()})
+
+    for component, list_name in components.list_name.items():
+        fn = f"{directory}/{list_name}.csv"
+        if os.path.isfile(fn):
+            overrides = pd.read_csv(fn, index_col=0, na_values="n/a")
+            attrs[component] = overrides.combine_first(attrs[component])
+
+    return attrs
+
+
+def generate_periodic_profiles(dt_index, nodes, weekly_profile, localize=None):
+    """
+    Give a 24*7 long list of weekly hourly profiles, generate this for each
+    country for the period dt_index, taking account of time zones and summer
+    time.
+    """
+
+    weekly_profile = pd.Series(weekly_profile, range(24 * 7))
+
+    week_df = pd.DataFrame(index=dt_index, columns=nodes)
+
+    for node in nodes:
+        timezone = pytz.timezone(pytz.country_timezones[node[:2]][0])
+        tz_dt_index = dt_index.tz_convert(timezone)
+        week_df[node] = [24 * dt.weekday() + dt.hour for dt in tz_dt_index]
+        week_df[node] = week_df[node].map(weekly_profile)
+
+    week_df = week_df.tz_localize(localize)
+
+    return week_df
+
+
+def parse(l):
+    if len(l) == 1:
+        return yaml.safe_load(l[0])
+    else:
+        return {l.pop(0): parse(l)}
+
+
+def update_config_with_sector_opts(config, sector_opts):
+    for o in sector_opts.split("-"):
+        if o.startswith("CF+"):
+            l = o.split("+")[1:]
+            update_config(config, parse(l))
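
Usage sketch (illustrative, not part of the diff) for two of the helpers moved above; the folder and network file names are placeholders: override_component_attrs() builds the attribute overrides passed to pypsa.Network, and update_config_with_sector_opts() merges a CF+... option string into a config dict.

import pypsa

from _helpers import override_component_attrs, update_config_with_sector_opts

# placeholder folder holding e.g. links.csv with extra bus2/bus3 columns
overrides = override_component_attrs("data/override_component_attrs")
n = pypsa.Network("placeholder_network.nc", override_component_attrs=overrides)

# "CF+sector+co2_budget+0.3" is parsed into {"sector": {"co2_budget": 0.3}}
# and merged into config via snakemake.utils.update_config
config = {}
update_config_with_sector_opts(config, "730H-T-H-CF+sector+co2_budget+0.3")
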
@@ -15,7 +15,7 @@ import numpy as np
 import pypsa
 import yaml
 from add_existing_baseyear import add_build_year_to_new_assets
-from helper import override_component_attrs, update_config_with_sector_opts
+from _helpers import override_component_attrs, update_config_with_sector_opts


 def add_brownfield(n, n_p, year):
@@ -17,7 +17,7 @@ import numpy as np
 import pypsa
 import xarray as xr
 import yaml
-from helper import override_component_attrs, update_config_with_sector_opts
+from _helpers import override_component_attrs, update_config_with_sector_opts
 from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs

 spatial = SimpleNamespace()
@@ -13,7 +13,7 @@ from functools import partial
 import geopandas as gpd
 import numpy as np
 import pandas as pd
-from helper import mute_print
+from _helpers import mute_print
 from tqdm import tqdm

 idx = pd.IndexSlice
@@ -15,7 +15,7 @@ import multiprocessing as mp

 import numpy as np
 import pandas as pd
-from helper import mute_print
+from _helpers import mute_print
 from tqdm import tqdm

 tj_to_ktoe = 0.0238845
@@ -8,7 +8,7 @@ Build industry sector ratios.
 """

 import pandas as pd
-from helper import mute_print
+from _helpers import mute_print

 # GWh/ktoe OR MWh/toe
 toe_to_MWh = 11.630
@@ -10,7 +10,7 @@ Build transport demand.
 import numpy as np
 import pandas as pd
 import xarray as xr
-from helper import generate_periodic_profiles
+from _helpers import generate_periodic_profiles


 def build_nodal_transport_data(fn, pop_layout):
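
Usage sketch (illustrative, not part of the diff) for generate_periodic_profiles() with a dummy flat weekly profile; the node names are placeholders and only their first two characters (the country code) are used for the time-zone lookup.

import numpy as np
import pandas as pd

from _helpers import generate_periodic_profiles

dt_index = pd.date_range("2013-01-01", "2013-12-31 23:00", freq="H", tz="UTC")
nodes = ["DE0 0", "FR0 0"]     # placeholder node names
weekly = np.ones(24 * 7)       # flat 24*7 hourly profile
profiles = generate_periodic_profiles(dt_index, nodes, weekly, localize=None)
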
@@ -1,91 +0,0 @@
-# -*- coding: utf-8 -*-
-# SPDX-FileCopyrightText: : 2020-2023 The PyPSA-Eur Authors
-#
-# SPDX-License-Identifier: MIT
-
-import contextlib
-import logging
-import os
-import sys
-from pathlib import Path
-
-import pandas as pd
-import pytz
-import yaml
-from pypsa.components import component_attrs, components
-from pypsa.descriptors import Dict
-from snakemake.utils import update_config
-
-logger = logging.getLogger(__name__)
-
-
-# Define a context manager to temporarily mute print statements
-@contextlib.contextmanager
-def mute_print():
-    with open(os.devnull, "w") as devnull:
-        with contextlib.redirect_stdout(devnull):
-            yield
-
-
-def override_component_attrs(directory):
-    """Tell PyPSA that links can have multiple outputs by
-    overriding the component_attrs. This can be done for
-    as many buses as you need with format busi for i = 2,3,4,5,....
-    See https://pypsa.org/doc/components.html#link-with-multiple-outputs-or-inputs
-
-    Parameters
-    ----------
-    directory : string
-        Folder where component attributes to override are stored
-        analogous to ``pypsa/component_attrs``, e.g. `links.csv`.
-
-    Returns
-    -------
-    Dictionary of overridden component attributes.
-    """
-
-    attrs = Dict({k: v.copy() for k, v in component_attrs.items()})
-
-    for component, list_name in components.list_name.items():
-        fn = f"{directory}/{list_name}.csv"
-        if os.path.isfile(fn):
-            overrides = pd.read_csv(fn, index_col=0, na_values="n/a")
-            attrs[component] = overrides.combine_first(attrs[component])
-
-    return attrs
-
-
-def generate_periodic_profiles(dt_index, nodes, weekly_profile, localize=None):
-    """
-    Give a 24*7 long list of weekly hourly profiles, generate this for each
-    country for the period dt_index, taking account of time zones and summer
-    time.
-    """
-
-    weekly_profile = pd.Series(weekly_profile, range(24 * 7))
-
-    week_df = pd.DataFrame(index=dt_index, columns=nodes)
-
-    for node in nodes:
-        timezone = pytz.timezone(pytz.country_timezones[node[:2]][0])
-        tz_dt_index = dt_index.tz_convert(timezone)
-        week_df[node] = [24 * dt.weekday() + dt.hour for dt in tz_dt_index]
-        week_df[node] = week_df[node].map(weekly_profile)
-
-    week_df = week_df.tz_localize(localize)
-
-    return week_df
-
-
-def parse(l):
-    if len(l) == 1:
-        return yaml.safe_load(l[0])
-    else:
-        return {l.pop(0): parse(l)}
-
-
-def update_config_with_sector_opts(config, sector_opts):
-    for o in sector_opts.split("-"):
-        if o.startswith("CF+"):
-            l = o.split("+")[1:]
-            update_config(config, parse(l))
@@ -13,7 +13,7 @@ import numpy as np
 import pandas as pd
 import pypsa
 import yaml
-from helper import override_component_attrs
+from _helpers import override_component_attrs
 from prepare_sector_network import prepare_costs

 idx = pd.IndexSlice
@@ -12,7 +12,7 @@ import geopandas as gpd
 import matplotlib.pyplot as plt
 import pandas as pd
 import pypsa
-from helper import override_component_attrs
+from _helpers import override_component_attrs
 from make_summary import assign_carriers
 from plot_summary import preferred_order, rename_techs
 from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches
@@ -14,7 +14,7 @@ import pandas as pd
 import pypsa
 import xarray as xr
 from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
-from helper import (
+from _helpers import (
     generate_periodic_profiles,
     override_component_attrs,
     update_config_with_sector_opts,
@@ -10,7 +10,7 @@ import logging

 import numpy as np
 import pypsa
-from helper import override_component_attrs, update_config_with_sector_opts
+from _helpers import override_component_attrs, update_config_with_sector_opts
 from vresutils.benchmark import memory_logger

 logger = logging.getLogger(__name__)