scripts/* revise imports

Fabian 2019-11-14 17:50:24 +01:00
parent bb9bbf295a
commit e59d2c4162
11 changed files with 50 additions and 79 deletions

View File

@@ -88,24 +88,21 @@ It further adds extendable ``generators`` and ``storage_units`` with **zero** ca
 - additional open- and combined-cycle gas turbines (if ``OCGT`` and/or ``CCGT`` is listed in the config setting ``electricity: extendable_carriers``)
 """
-import logging
-logger = logging.getLogger(__name__)
-import pandas as pd
-idx = pd.IndexSlice
-import numpy as np
-import xarray as xr
-import geopandas as gpd
 from vresutils.costdata import annuity
 from vresutils.load import timeseries_opsd
 from vresutils import transfer as vtransfer
+import logging
+import pandas as pd
+import numpy as np
+import xarray as xr
+import geopandas as gpd
 import pypsa
 import powerplantmatching as ppm
+idx = pd.IndexSlice
+logger = logging.getLogger(__name__)
 def normed(s): return s/s.sum()
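
Two module-level helpers survive this header untouched: ``idx = pd.IndexSlice`` and ``normed``. As a quick standalone illustration of what they do (the series values and index labels below are invented, not taken from the repository):

    import pandas as pd

    idx = pd.IndexSlice

    def normed(s): return s/s.sum()

    # normed rescales a series so that its entries sum to one
    weights = pd.Series({"OCGT": 2.0, "CCGT": 6.0})
    print(normed(weights))  # OCGT 0.25, CCGT 0.75

    # pd.IndexSlice gives readable label-based selections on a MultiIndex
    df = pd.DataFrame({"p_nom": 1.0},
                      index=pd.MultiIndex.from_product([["solar", "onwind"], ["DE", "FR"]]))
    print(df.loc[idx["solar", :], "p_nom"])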

View File

@@ -8,7 +8,7 @@ Relevant Settings
 countries:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`toplevel_cf`
@@ -36,14 +36,13 @@ Description
 -----------
 """
-from vresutils.graph import voronoi_partition_pts
 import os
-from operator import attrgetter
 import pandas as pd
 import geopandas as gpd
+from vresutils.graph import voronoi_partition_pts
 import pypsa
 import logging

View File

@@ -15,7 +15,7 @@ Relevant Settings
 resource:
 correction_factor:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`snapshots_cf`, :ref:`renewable_cf`
@@ -64,7 +64,6 @@ import xarray as xr
 import pandas as pd
 import geokit as gk
-from osgeo import gdal
 from scipy.sparse import vstack
 import pycountry as pyc
 import matplotlib.pyplot as plt

View File

@@ -14,7 +14,7 @@ Relevant Settings
 cutout:
 clip_min_inflow:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`toplevel_cf`, :ref:`renewable_cf`
@@ -43,7 +43,7 @@ Outputs
 .. image:: ../img/inflow-ts.png
 :scale: 33 %
 .. image:: ../img/inflow-box.png
 :scale: 33 %
@@ -56,7 +56,6 @@ Description
 import os
 import atlite
-import pandas as pd
 import geopandas as gpd
 from vresutils import hydro as vhydro
 import logging

View File

@@ -10,7 +10,7 @@ Relevant Settings
 {technology}:
 cutout:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`renewable_cf`
@@ -37,7 +37,6 @@ Description
 import numpy as np
 import atlite
-from osgeo import gdal
 import geokit as gk
 def determine_cutout_xXyY(cutout_name):

View File

@@ -15,7 +15,7 @@ Relevant Settings
 electricity:
 max_hours:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`costs_cf`, :ref:`electricity_cf`
@@ -51,7 +51,6 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results.
 import os
 from six import iteritems
-from itertools import product
 import pandas as pd
 import pypsa
@@ -94,7 +93,7 @@ def calculate_costs(n,label,costs):
 for c in n.iterate_components(n.branch_components|n.controllable_one_port_components^{"Load"}):
 capital_costs = c.df.capital_cost*c.df[opt_name.get(c.name,"p") + "_nom_opt"]
 capital_costs_grouped = capital_costs.groupby(c.df.carrier).sum()
 # Index tuple(s) indicating the newly to-be-added row(s)
 raw_index = tuple([[c.list_name],["capital"],list(capital_costs_grouped.index)])
 costs = _add_indexed_rows(costs, raw_index)
@@ -149,7 +148,7 @@ def include_in_summary(summary, multiindexprefix, label, item):
 # Index tuple(s) indicating the newly to-be-added row(s)
 raw_index = tuple([multiindexprefix,list(item.index)])
 summary = _add_indexed_rows(summary, raw_index)
 summary.loc[idx[raw_index], label] = item.values
 return summary
@@ -191,11 +190,11 @@ def calculate_supply(n,label,supply):
 continue
 s = c.pnl.p[items].max().multiply(c.df.loc[items,'sign']).groupby(c.df.loc[items,'carrier']).sum()
 # Index tuple(s) indicating the newly to-be-added row(s)
 raw_index = tuple([[i],[c.list_name],list(s.index)])
 supply = _add_indexed_rows(supply, raw_index)
 supply.loc[idx[raw_index],label] = s.values
@@ -237,11 +236,11 @@ def calculate_supply_energy(n,label,supply_energy):
 continue
 s = c.pnl.p[items].sum().multiply(c.df.loc[items,'sign']).groupby(c.df.loc[items,'carrier']).sum()
 # Index tuple(s) indicating the newly to-be-added row(s)
 raw_index = tuple([[i],[c.list_name],list(s.index)])
 supply_energy = _add_indexed_rows(supply_energy, raw_index)
 supply_energy.loc[idx[raw_index],label] = s.values
@@ -491,7 +490,7 @@ if __name__ == "__main__":
 for opts in expand_from_wildcard("opts")}
 print(networks_dict)
 dfs = make_summaries(networks_dict, country=snakemake.wildcards.country)
 to_csv(dfs)
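
The hunks above repeatedly call ``_add_indexed_rows``, a helper that is not itself part of this diff. Judging only from how it is used — the frame must be reindexed so that the freshly built MultiIndex rows exist before values are assigned into them — a plausible minimal sketch could look as follows (the helper body and the example labels are assumptions, not code from the repository):

    import pandas as pd

    def _add_indexed_rows(df, raw_index):
        # extend the frame's index by the cartesian product of the raw_index levels
        new_index = df.index.union(pd.MultiIndex.from_product(raw_index))
        return df.reindex(new_index)

    # usage mirroring calculate_costs: create ('generators', 'capital', <carrier>) rows
    costs = pd.DataFrame(columns=["label"],
                         index=pd.MultiIndex.from_arrays([[], [], []]))
    raw_index = tuple([["generators"], ["capital"], ["onwind", "solar"]])
    costs = _add_indexed_rows(costs, raw_index)
    costs.loc[pd.IndexSlice[raw_index], "label"] = [1.0, 2.0]
    print(costs)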

View File

@@ -15,20 +15,11 @@ Description
 """
-import pypsa
 from _helpers import load_network, aggregate_p, aggregate_costs
-from vresutils import plot as vplot
-import os
+import pypsa
 import pandas as pd
-import geopandas as gpd
 import numpy as np
-from itertools import product, chain
+from six.moves import zip
-from six.moves import map, zip
-from six import itervalues, iterkeys
-from collections import OrderedDict as odict
 import logging
 import cartopy.crs as ccrs
@@ -36,7 +27,6 @@ import matplotlib.pyplot as plt
 import matplotlib as mpl
 from matplotlib.patches import Circle, Ellipse
 from matplotlib.legend_handler import HandlerPatch
-import seaborn as sns
 to_rgba = mpl.colors.colorConverter.to_rgba
 def make_handler_map_to_scale_circles_as_in(ax, dont_resize_actively=False):
@@ -92,7 +82,7 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
 line_widths_exp = dict(Line=n.lines.s_nom_opt, Link=n.links.p_nom_opt)
 line_widths_cur = dict(Line=n.lines.s_nom_min, Link=n.links.p_nom_min)
 else:
-raise 'plotting of {} has not been implemented yet'.format(plot)
+raise 'plotting of {} has not been implemented yet'.format(attribute)
 line_colors_with_alpha = \
@@ -140,11 +130,11 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
 handles.append(plt.Line2D([0],[0],color=line_colors['exp'],
 linewidth=s*1e3/linewidth_factor))
 labels.append("{} GW".format(s))
-l1 = l1_1 = ax.legend(handles, labels,
+l1_1 = ax.legend(handles, labels,
 loc="upper left", bbox_to_anchor=(0.24, 1.01),
 frameon=False,
 labelspacing=0.8, handletextpad=1.5,
 title='Transmission Exist./Exp. ')
 ax.add_artist(l1_1)
 handles = []
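
The ``l1_1`` legend touched in the last hunk is built from proxy ``Line2D`` handles whose line widths encode transmission capacity. A minimal standalone sketch of that pattern (the capacities, the colour and the ``linewidth_factor`` value are invented; only the legend construction mirrors the hunk above):

    import matplotlib.pyplot as plt

    fig, ax = plt.subplots()
    linewidth_factor = 1e3  # assumed scaling between MW and plotted line width

    handles, labels = [], []
    for s in (10, 20):  # example capacities in GW
        # proxy artists: the data is irrelevant, only the styled line matters for the legend
        handles.append(plt.Line2D([0], [0], color="rosybrown",
                                  linewidth=s * 1e3 / linewidth_factor))
        labels.append("{} GW".format(s))

    l1_1 = ax.legend(handles, labels,
                     loc="upper left", bbox_to_anchor=(0.24, 1.01),
                     frameon=False, labelspacing=0.8, handletextpad=1.5,
                     title='Transmission Exist./Exp.')
    ax.add_artist(l1_1)  # re-adding keeps this legend alive if a later ax.legend() call replaces it
    plt.show()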

View File

@@ -10,7 +10,7 @@ Relevant Settings
 enable:
 prepare_links_p_nom:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`toplevel_cf`
@@ -32,7 +32,6 @@ Description
 """
 import pandas as pd
-import numpy as np
 if __name__ == "__main__":
 links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]

View File

@@ -32,7 +32,7 @@ Relevant Settings
 solver:
 name:
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`costs_cf`, :ref:`electricity_cf`, :ref:`renewable_cf`,
 :ref:`lines_cf`, :ref:`links_cf`, :ref:`solving_cf`
@@ -78,21 +78,14 @@ The rule :mod:`simplify_network` does up to four things:
 4. Optionally, if an integer were provided for the wildcard ``{simpl}`` (e.g. ``networks/elec_s500.nc``), the network is clustered to this number of clusters with the routines from the ``cluster_network`` rule with the function ``cluster_network.cluster(...)``. This step is usually skipped!
 """
-import pandas as pd
+from cluster_network import clustering_for_n_clusters, cluster_regions
-idx = pd.IndexSlice
+from add_electricity import load_costs
 import logging
-logger = logging.getLogger(__name__)
+import pandas as pd
-import os
-import re
 import numpy as np
 import scipy as sp
 from scipy.sparse.csgraph import connected_components, dijkstra
-import xarray as xr
-import geopandas as gpd
-import shapely
-import networkx as nx
 from six import iteritems
 from six.moves import reduce
@@ -101,8 +94,9 @@ import pypsa
 from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
 from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport
-from cluster_network import clustering_for_n_clusters, cluster_regions
-from add_electricity import load_costs
+logger = logging.getLogger(__name__)
+idx = pd.IndexSlice
 def simplify_network_to_380(n):
 ## All goes to v_nom == 380

View File

@@ -27,7 +27,7 @@ Relevant Settings
 (plotting:)
 (conv_techs:)
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`electricity_cf`, :ref:`solving_cf`, :ref:`plotting_cf`
@@ -72,7 +72,7 @@ Details (and errors made through this heuristic) are discussed in the paper
 .. tip::
 The rule :mod:`solve_all_networks` runs
 for all ``scenario`` s in the configuration file
 the rule :mod:`solve_network`.
 """
@@ -82,7 +82,6 @@ import pandas as pd
 import logging
 logger = logging.getLogger(__name__)
 import gc
-import os
 import pypsa
 from pypsa.descriptors import free_output_series_dataframes
@@ -172,20 +171,20 @@ def add_opts_constraints(n, opts=None):
 def agg_p_nom_min_rule(model, country, carrier):
 min = agg_p_nom_minmax.at[(country, carrier), 'min']
 return ((sum(model.generator_p_nom[gen]
 for gen in n.generators.index[(gen_country == country) & (n.generators.carrier == carrier)])
 >= min)
 if np.isfinite(min) else pypsa.opt.Constraint.Skip)
 def agg_p_nom_max_rule(model, country, carrier):
 max = agg_p_nom_minmax.at[(country, carrier), 'max']
 return ((sum(model.generator_p_nom[gen]
 for gen in n.generators.index[(gen_country == country) & (n.generators.carrier == carrier)])
 <= max)
 if np.isfinite(max) else pypsa.opt.Constraint.Skip)
 n.model.agg_p_nom_min = pypsa.opt.Constraint(list(agg_p_nom_minmax.index), rule=agg_p_nom_min_rule)
 n.model.agg_p_nom_max = pypsa.opt.Constraint(list(agg_p_nom_minmax.index), rule=agg_p_nom_max_rule)
 def add_lv_constraint(n):
 line_volume = getattr(n, 'line_volume_limit', None)
 if line_volume is not None and not np.isinf(line_volume):
@@ -261,9 +260,9 @@ def solve_network(n, config=None, solver_log=None, opts=None, callback=None,
 free_output_series_dataframes(n)
 pypsa.opf.network_lopf_build_model(n, formulation=solve_opts['formulation'])
 add_opts_constraints(n, opts)
 if not fix_ext_lines:
 add_lv_constraint(n)
 add_lc_constraint(n)
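
The ``agg_p_nom_min_rule``/``agg_p_nom_max_rule`` pair above returns ``Constraint.Skip`` whenever no finite bound exists for a (country, carrier) combination, so the corresponding constraint is simply not generated. A small self-contained sketch of that pattern, using pyomo directly (the variable, the index set and the bounds dictionary are invented for illustration):

    import numpy as np
    import pyomo.environ as po

    m = po.ConcreteModel()
    m.p_nom = po.Var(["gen1", "gen2"], within=po.NonNegativeReals)

    # hypothetical minimum capacities; NaN means "no bound given"
    p_nom_min = {"gen1": 10.0, "gen2": np.nan}

    def min_rule(model, g):
        lo = p_nom_min[g]
        # skip the constraint entirely when the bound is not finite,
        # mirroring the np.isfinite(...) check in agg_p_nom_min_rule
        return model.p_nom[g] >= lo if np.isfinite(lo) else po.Constraint.Skip

    m.p_nom_min_con = po.Constraint(["gen1", "gen2"], rule=min_rule)
    m.p_nom_min_con.pprint()  # only the gen1 constraint is actually created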

View File

@@ -21,7 +21,7 @@ Relevant Settings
 name:
 (solveroptions):
 .. seealso::
 Documentation of the configuration file ``config.yaml`` at
 :ref:`solving_cf`
@@ -40,20 +40,17 @@ Description
 """
-import numpy as np
-import pandas as pd
-import logging
-logger = logging.getLogger(__name__)
 from solve_network import patch_pyomo_tmpdir, prepare_network, solve_network
+import logging
 import pypsa
+logger = logging.getLogger(__name__)
 if __name__ == "__main__":
 # Detect running outside of snakemake and mock snakemake for testing
 if 'snakemake' not in globals():
-from vresutils.snakemake import MockSnakemake, Dict
+from vresutils.snakemake import MockSnakemake
 snakemake = MockSnakemake(
 wildcards=dict(network='elec', simpl='', clusters='45', lv='1.25', opts='Co2L-3H'),
 input=["networks/{network}_s{simpl}_{clusters}_lv{lv}_{opts}.nc"],