diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index f4f2d637..1b3a4dfc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -5,7 +5,7 @@ exclude: "^LICENSES"
 
 repos:
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v4.3.0
+  rev: v4.4.0
   hooks:
   - id: check-merge-conflict
   - id: end-of-file-fixer
@@ -17,7 +17,7 @@ repos:
 
 # Sort package imports alphabetically
 - repo: https://github.com/PyCQA/isort
-  rev: 5.10.1
+  rev: 5.11.4
   hooks:
   - id: isort
     args: ["--profile", "black", "--filter-files"]
@@ -30,7 +30,7 @@ repos:
 
 # Find common spelling mistakes in comments and docstrings
 - repo: https://github.com/codespell-project/codespell
-  rev: v2.2.1
+  rev: v2.2.2
   hooks:
   - id: codespell
     args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom'] # Ignore capital case words, e.g. country codes
@@ -39,19 +39,19 @@ repos:
 
 # Make docstrings PEP 257 compliant
 - repo: https://github.com/PyCQA/docformatter
-  rev: v1.5.0
+  rev: v1.6.0.rc1
   hooks:
   - id: docformatter
     args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
 
 - repo: https://github.com/keewis/blackdoc
-  rev: v0.3.7
+  rev: v0.3.8
   hooks:
   - id: blackdoc
 
 # Formatting with "black" coding style
 - repo: https://github.com/psf/black
-  rev: 22.8.0
+  rev: 22.12.0
   hooks:
   # Format Python files
   - id: black
@@ -67,14 +67,14 @@ repos:
 
 # Do YAML formatting (before the linter checks it for misses)
 - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-  rev: v2.4.0
+  rev: v2.6.0
   hooks:
   - id: pretty-format-yaml
     args: [--autofix, --indent, "2", --preserve-quotes]
 
 # Format Snakemake rule / workflow files
 - repo: https://github.com/snakemake/snakefmt
-  rev: v0.6.1
+  rev: v0.8.0
   hooks:
   - id: snakefmt
 
@@ -87,6 +87,6 @@ repos:
 
 # Check for FSFE REUSE compliance (licensing)
 - repo: https://github.com/fsfe/reuse-tool
-  rev: v0.14.0
+  rev: v1.1.0
   hooks:
   - id: reuse
diff --git a/Snakefile b/Snakefile
index 1a7cbe9f..f4f37049 100644
--- a/Snakefile
+++ b/Snakefile
@@ -68,7 +68,7 @@ if config["enable"].get("prepare_links_p_nom", False):
             "logs/" + RDIR + "prepare_links_p_nom.log",
         threads: 1
         resources:
-            mem_mb=500,
+            mem_mb=1500,
         script:
             "scripts/prepare_links_p_nom.py"
 
@@ -167,7 +167,7 @@ rule base_network:
         "benchmarks/" + RDIR + "base_network"
     threads: 1
     resources:
-        mem_mb=500,
+        mem_mb=1500,
     script:
         "scripts/base_network.py"
 
@@ -190,7 +190,7 @@ rule build_shapes:
         "logs/" + RDIR + "build_shapes.log",
     threads: 1
     resources:
-        mem_mb=500,
+        mem_mb=1500,
     script:
         "scripts/build_shapes.py"
 
@@ -657,7 +657,7 @@ rule make_summary:
         + RDIR
         + "make_summary/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
     resources:
-        mem_mb=500,
+        mem_mb=1500,
     script:
         "scripts/make_summary.py"
 
@@ -676,7 +676,7 @@ rule plot_summary:
         + RDIR
         + "plot_summary/{summary}_elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log",
     resources:
-        mem_mb=500,
+        mem_mb=1500,
     script:
         "scripts/plot_summary.py"
 
@@ -706,6 +706,6 @@ rule plot_p_nom_max:
         + RDIR
         + "plot_p_nom_max/elec_s{simpl}_{clusts}_{techs}_{country}_{ext}.log",
     resources:
-        mem_mb=500,
+        mem_mb=1500,
     script:
         "scripts/plot_p_nom_max.py"
diff --git a/config.default.yaml b/config.default.yaml
index 775bce4e..b06c06b7 100755
--- a/config.default.yaml
+++ b/config.default.yaml
@@ -86,6 +86,7 @@ electricity:
 
 atlite:
   nprocesses: 4
+  show_progress: false # false saves time
   cutouts:
     # use 'base' to determine geographical bounds and time span from config
     # base:
@@ -262,6 +263,8 @@ clustering:
     algorithm: kmeans # choose from: [hac, kmeans]
     feature: solar+onwind-time # only for hac. choose from: [solar+onwind-time, solar+onwind-cap, solar-time, solar-cap, solar+offwind-cap] etc.
     exclude_carriers: []
+    remove_stubs: true
+    remove_stubs_across_borders: true
   cluster_network:
     algorithm: kmeans
     feature: solar+onwind-time
diff --git a/config.tutorial.yaml b/config.tutorial.yaml
index 2fd74e4b..dc5347aa 100755
--- a/config.tutorial.yaml
+++ b/config.tutorial.yaml
@@ -56,6 +56,7 @@ electricity:
 
 atlite:
   nprocesses: 4
+  show_progress: false # false saves time
   cutouts:
     be-03-2013-era5:
       module: era5
diff --git a/data/links_tyndp.csv b/data/links_tyndp.csv
index 8079be72..a0603120 100644
--- a/data/links_tyndp.csv
+++ b/data/links_tyndp.csv
@@ -24,3 +24,5 @@ Gridlink,Kingsnorth (UK),Warande (FR),160,,1400,in permitting,,https://tyndp.ent
 NeuConnect,Grain (UK),Fedderwarden (DE),680,,1400,in permitting,,https://tyndp.entsoe.eu/tyndp2018/projects/projects/309,0.716666666666667,51.44,8.046524,53.562763
 NordBalt,Klaipeda (LT),Nybro (SE),450,,700,built,,https://en.wikipedia.org/wiki/NordBalt,21.256667,55.681667,15.854167,56.767778
 Estlink 1,Harku (EE),Espoo (FI),105,,350,built,,https://en.wikipedia.org/wiki/Estlink,24.560278,59.384722,24.551667,60.203889
+Greenlink,Waterford (IE),Pembroke (UK),,180,500,under construction,,https://tyndp2022-project-platform.azurewebsites.net/projectsheets/transmission/286,-6.987,52.260,-4.986,51.686
+Celtic Interconnector,Aghada (IE),La Martyre (FR),,572,700,under consideration,,https://tyndp2022-project-platform.azurewebsites.net/projectsheets/transmission/107,-8.16642,51.91413,-4.184,48.459
diff --git a/data/parameter_corrections.yaml b/data/parameter_corrections.yaml
index b50fc03a..df15738a 100644
--- a/data/parameter_corrections.yaml
+++ b/data/parameter_corrections.yaml
@@ -36,12 +36,20 @@ Link:
       "5583": "7428" # bus0 == bus1 to remove link in remove_unconnected_components (Sardinia)
       "13588": "7428" # bus0 == bus1 to remove link in remove_unconnected_components (Sardinia)
       "T23": "6355" # bus0 == bus1 to remove link in remove_unconnected_components (NordBalt)
+      "14815": "5939" # Kainachtal
+      "8706": "6448"
   bus1:
     index:
       "12931": "8152" # BorWin3
       "5582": "2382" # combine link 5583 + 5582 in 5582 (Sardinia)
       "13589": "1349" # combine link 13589 + 13588 in 13589 (Sardinia)
       "14820": "6354" # NordBalt
+      "14810": "6365" # Skagerrak
+      "8708": "6448"
+      "8394": "6695"
+      "14813": "7052"
+      "8009": "5939"
+      "5601": "7052" # Link Sweden - Lübeck
   length:
     index:
       "5582": 26.39 # new length of combined links (sum)
@@ -53,6 +61,7 @@ Line:
   bus0:
     index:
       "14573": "7179" #fix bus-id substation in PT (220/380kV issue)
+      "14756": "8577" # Deeside connection
   v_nom:
     index:
       "14573": 220 # 220/380kV issue of substation in PT
diff --git a/doc/release_notes.rst b/doc/release_notes.rst
index 263695d1..adcbfb58 100644
--- a/doc/release_notes.rst
+++ b/doc/release_notes.rst
@@ -10,6 +10,10 @@ Release Notes
 Upcoming Release
 ================
 
+* Fix EQ constraint for the case no hydro inflow is available
+
+* Bugfix in the reserve constraint will increase demand related reserve requirements
+
 **New Features**
 
 * Carriers of generators can now be excluded from aggregation in clustering network and simplify network.
diff --git a/doc/tutorial.rst b/doc/tutorial.rst
index 75bd350b..297c2c10 100644
--- a/doc/tutorial.rst
+++ b/doc/tutorial.rst
@@ -61,14 +61,16 @@ It is also possible to allow less or more carbon-dioxide emissions. Here, we lim
 
 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 35,37
+   :start-at: electricity:
+   :end-before: extendable_carriers:
 
 PyPSA-Eur also includes a database of existing conventional powerplants. We can select which types of powerplants we like to be included:
 
 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 35,51
+   :start-at: extendable_carriers:
+   :end-before: max_hours:
 
 To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data. It is advisable to adapt the required range of coordinates to the selection of countries.
@@ -83,14 +85,21 @@ For example, we may want to use the ERA-5 dataset for solar and not the default
 
 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 63,106,107
+   :start-at: be-03-2013-era5:
+   :end-at: module:
+
+.. literalinclude:: ../config.tutorial.yaml
+   :language: yaml
+   :start-at: solar:
+   :end-at: cutout:
 
 Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available).
 
 .. literalinclude:: ../config.tutorial.yaml
    :language: yaml
-   :lines: 188,198,199
+   :start-at: solver:
+   :end-before: plotting:
 
 .. note::
diff --git a/envs/environment.yaml b/envs/environment.yaml
index 5dc207a2..ca2580b1 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -10,7 +10,7 @@ dependencies:
 - python>=3.8
 - pip
 
-- pypsa>=0.20
+- pypsa>=0.21.3
 - atlite>=0.2.9
 - dask
 
@@ -25,19 +25,19 @@ dependencies:
 - pytables
 - lxml
 - powerplantmatching>=0.5.4
-- numpy
+- numpy<1.24
 - pandas>=1.4
 - geopandas>=0.11.0
 - xarray
 - netcdf4
 - networkx
 - scipy
-- shapely<2.0 # need to address deprecations
+- shapely<2.0
 - progressbar2
 - pyomo
-- matplotlib
+- matplotlib<3.6
 - proj
-- fiona <= 1.18.20 # Till issue https://github.com/Toblerity/Fiona/issues/1085 is not solved
+- fiona
 - country_converter
 
 # Keep in conda environment when calling ipython
diff --git a/scripts/_helpers.py b/scripts/_helpers.py
index ba88efb2..127e42a8 100644
--- a/scripts/_helpers.py
+++ b/scripts/_helpers.py
@@ -28,7 +28,6 @@ def configure_logging(snakemake, skip_handlers=False):
     skip_handlers : True | False (default)
        Do (not) skip the default handlers created for redirecting output to STDERR and file.
     """
-
     import logging
 
     kwargs = snakemake.config.get("logging", dict()).copy()
diff --git a/scripts/add_electricity.py b/scripts/add_electricity.py
index 3d9babfd..85391a64 100755
--- a/scripts/add_electricity.py
+++ b/scripts/add_electricity.py
@@ -111,7 +111,6 @@ def calculate_annuity(n, r):
 
     discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6
     """
-
     if isinstance(r, pd.Series):
         return pd.Series(1 / n, index=r.index).where(
             r == 0, r / (1.0 - 1.0 / (1.0 + r) ** n)
diff --git a/scripts/build_bus_regions.py b/scripts/build_bus_regions.py
index a26aaeb7..ef4cc396 100644
--- a/scripts/build_bus_regions.py
+++ b/scripts/build_bus_regions.py
@@ -69,7 +69,6 @@ def voronoi_partition_pts(points, outline):
     -------
     polygons : N - ndarray[dtype=Polygon|MultiPolygon]
     """
-
     points = np.asarray(points)
 
     if len(points) == 1:
@@ -107,7 +106,7 @@ def voronoi_partition_pts(points, outline):
 
         polygons.append(poly)
 
-    return np.array(polygons, dtype=object)
+    return polygons
 
 
 if __name__ == "__main__":
diff --git a/scripts/build_load_data.py b/scripts/build_load_data.py
index 482256ba..780a651e 100755
--- a/scripts/build_load_data.py
+++ b/scripts/build_load_data.py
@@ -189,7 +189,6 @@ def manual_adjustment(load, fn_load, powerstatistics):
         Manual adjusted and interpolated load time-series with UTC
         timestamps x ISO-2 countries
     """
-
     if powerstatistics:
         if "MK" in load.columns:
             if "AL" not in load.columns or load.AL.isnull().values.all():
diff --git a/scripts/build_natura_raster.py b/scripts/build_natura_raster.py
index 0b5ce34f..33e4cf99 100644
--- a/scripts/build_natura_raster.py
+++ b/scripts/build_natura_raster.py
@@ -84,7 +84,7 @@ if __name__ == "__main__":
 
     # adjusted boundaries
     shapes = gpd.read_file(snakemake.input.natura).to_crs(3035)
-    raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
+    raster = ~geometry_mask(shapes.geometry, out_shape, transform)
     raster = raster.astype(rio.uint8)
 
     with rio.open(
diff --git a/scripts/build_shapes.py b/scripts/build_shapes.py
index 79409a26..cda27963 100644
--- a/scripts/build_shapes.py
+++ b/scripts/build_shapes.py
@@ -129,6 +129,8 @@ def countries(naturalearth, country_list):
     s = df.set_index("name")["geometry"].map(_simplify_polys)
     if "RS" in country_list:
         s["RS"] = s["RS"].union(s.pop("KV"))
+        # cleanup shape union
+        s["RS"] = Polygon(s["RS"].exterior.coords)
 
     return s
 
@@ -145,7 +147,7 @@ def eez(country_shapes, eez, country_list):
         lambda s: _simplify_polys(s, filterremote=False)
     )
     s = gpd.GeoSeries(
-        {k: v for k, v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3}
+        {k: v for k, v in s.items() if v.distance(country_shapes[k]) < 1e-3}
     )
     s = s.to_frame("geometry")
     s.index.name = "name"
diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py
index 63fec077..80998b20 100644
--- a/scripts/cluster_network.py
+++ b/scripts/cluster_network.py
@@ -222,7 +222,6 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
     """
     Determine the number of clusters per country.
     """
-
     L = (
         n.loads_t.p_set.mean()
         .groupby(n.loads.bus)
diff --git a/scripts/make_summary.py b/scripts/make_summary.py
index 873a2a8b..4c3c675a 100644
--- a/scripts/make_summary.py
+++ b/scripts/make_summary.py
@@ -235,7 +235,6 @@ def calculate_supply(n, label, supply):
     calculate the max dispatch of each component at the buses where the
     loads are attached.
     """
-
     load_types = n.buses.carrier.unique()
 
     for i in load_types:
@@ -296,7 +295,6 @@ def calculate_supply_energy(n, label, supply_energy):
     calculate the total dispatch of each component at the buses where the
     loads are attached.
""" - load_types = n.buses.carrier.unique() for i in load_types: @@ -556,9 +554,13 @@ if __name__ == "__main__": opts="Co2L-24H", country="all", ) - network_dir = os.path.join("..", "results", "networks") + network_dir = os.path.join( + "..", "results", "networks", snakemake.config["run"]["name"] + ) else: - network_dir = os.path.join("results", "networks") + network_dir = os.path.join( + "results", "networks", snakemake.config["run"]["name"] + ) configure_logging(snakemake) config = snakemake.config diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index c48571ba..c05db561 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -92,7 +92,7 @@ def plot_costs(infn, config, fn=None): print(df.sum()) - new_index = (preferred_order & df.index).append( + new_index = (preferred_order.intersection(df.index)).append( df.index.difference(preferred_order) ) @@ -149,7 +149,7 @@ def plot_energy(infn, config, fn=None): print(df.sum()) - new_index = (preferred_order & df.index).append( + new_index = (preferred_order.intersection(df.index)).append( df.index.difference(preferred_order) ) diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py index 2c7f8413..7aa7a732 100644 --- a/scripts/simplify_network.py +++ b/scripts/simplify_network.py @@ -111,11 +111,15 @@ def simplify_network_to_380(n): """ Fix all lines to a voltage level of 380 kV and remove all transformers. - The function preserves the transmission capacity for each line while updating - its voltage level, line type and number of parallel bundles (num_parallel). + The function preserves the transmission capacity for each line while + updating + its voltage level, line type and number of parallel bundles + (num_parallel). - Transformers are removed and connected components are moved from their - starting bus to their ending bus. The corresponding starting buses are + Transformers are removed and connected components are moved from + their + starting bus to their ending bus. The corresponding starting buses + are removed as well. 
""" logger.info("Mapping all network lines onto a single 380kV layer") @@ -217,7 +221,7 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, out tech, ", ".join( "{:.0f} Eur/MW/a for `{}`".format(d, b) - for b, d in costs.iteritems() + for b, d in costs.items() ), ) ) @@ -369,7 +373,7 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()): n.mremove("Link", all_links) static_attrs = n.components["Link"]["attrs"].loc[lambda df: df.static] - for attr, default in static_attrs.default.iteritems(): + for attr, default in static_attrs.default.items(): params.setdefault(attr, default) n.links.loc[name] = pd.Series(params) @@ -395,7 +399,11 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()): def remove_stubs(n, costs, config, output, aggregation_strategies=dict()): logger.info("Removing stubs") - busmap = busmap_by_stubs(n) # ['country']) + across_borders = config["clustering"]["simplify_network"].get( + "remove_stubs_across_borders", True + ) + matching_attrs = [] if across_borders else ["country"] + busmap = busmap_by_stubs(n, matching_attrs) connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config) @@ -530,22 +538,20 @@ if __name__ == "__main__": n, technology_costs, snakemake.config, snakemake.output, aggregation_strategies ) - n, stub_map = remove_stubs( - n, - technology_costs, - snakemake.config, - snakemake.output, - aggregation_strategies=aggregation_strategies, - ) + busmaps = [trafo_map, simplify_links_map] - busmaps = [trafo_map, simplify_links_map, stub_map] + cluster_config = snakemake.config["clustering"]["simplify_network"] + if cluster_config.get("remove_stubs", True): + n, stub_map = remove_stubs( + n, + technology_costs, + snakemake.config, + snakemake.output, + aggregation_strategies=aggregation_strategies, + ) + busmaps.append(stub_map) - cluster_config = snakemake.config.get("clustering", {}).get("simplify_network", {}) - if ( - cluster_config.get("clustering", {}) - .get("simplify_network", {}) - .get("to_substations", False) - ): + if cluster_config.get("to_substations", False): n, substation_map = aggregate_to_substations(n, aggregation_strategies) busmaps.append(substation_map) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 4127b273..8a964593 100755 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -216,18 +216,21 @@ def add_EQ_constraints(n, o, scaling=1e-1): .T.groupby(ggrouper, axis=1) .apply(join_exprs) ) - lhs_spill = ( - linexpr( - ( - -n.snapshot_weightings.stores * scaling, - get_var(n, "StorageUnit", "spill").T, + if not n.storage_units_t.inflow.empty: + lhs_spill = ( + linexpr( + ( + -n.snapshot_weightings.stores * scaling, + get_var(n, "StorageUnit", "spill").T, + ) ) + .T.groupby(sgrouper, axis=1) + .apply(join_exprs) ) - .T.groupby(sgrouper, axis=1) - .apply(join_exprs) - ) - lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("") - lhs = lhs_gen + lhs_spill + lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("") + lhs = lhs_gen + lhs_spill + else: + lhs = lhs_gen define_constraints(n, lhs, ">=", rhs, "equity", "min") @@ -278,7 +281,7 @@ def add_operational_reserve_margin_constraint(n, config): ).sum(1) # Total demand at t - demand = n.loads_t.p.sum(1) + demand = n.loads_t.p_set.sum(1) # VRES potential of non extendable generators capacity_factor = n.generators_t.p_max_pu[vres_i.difference(ext_i)] @@ -321,7 +324,6 @@ def add_operational_reserve_margin(n, sns, config): Build reserve margin constraints based on the 
     https://genxproject.github.io/GenX/dev/core/#Reserves.
     """
-
     define_variables(n, 0, np.inf, "Generator", "r", axes=[sns, n.generators.index])
 
     add_operational_reserve_margin_constraint(n, config)
@@ -389,11 +391,7 @@ def solve_network(n, config, opts="", **kwargs):
 
     if skip_iterations:
         network_lopf(
-            n,
-            solver_name=solver_name,
-            solver_options=solver_options,
-            extra_functionality=extra_functionality,
-            **kwargs
+            n, solver_name=solver_name, solver_options=solver_options, **kwargs
         )
     else:
         ilopf(
@@ -403,7 +401,6 @@ def solve_network(n, config, opts="", **kwargs):
             track_iterations=track_iterations,
             min_iterations=min_iterations,
             max_iterations=max_iterations,
-            extra_functionality=extra_functionality,
             **kwargs
         )
     return n
@@ -432,6 +429,7 @@ if __name__ == "__main__":
             n,
             snakemake.config,
             opts,
+            extra_functionality=extra_functionality,
             solver_dir=tmpdir,
             solver_logfile=snakemake.log.solver,
         )
diff --git a/test/config.test1.yaml b/test/config.test1.yaml
index 56f0425f..253446ef 100755
--- a/test/config.test1.yaml
+++ b/test/config.test1.yaml
@@ -54,6 +54,7 @@ electricity:
 
 atlite:
   nprocesses: 4
+  show_progress: false # false saves time
   cutouts:
     be-03-2013-era5:
       module: era5