merge master

Fabian Neumann 2022-02-18 10:24:26 +01:00
commit 52b0d29fae
65 changed files with 1667 additions and 2028 deletions

90
.github/workflows/ci.yaml vendored Normal file

@@ -0,0 +1,90 @@
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
name: CI
# Caching method based on and described by:
# epassaro (2021): https://dev.to/epassaro/caching-anaconda-environments-in-github-actions-5hde
# and code in GitHub repo: https://github.com/epassaro/cache-conda-envs
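# In short: the conda environment directory (matrix.prefix) is cached under a key
# built from the OS label, the hash of envs/environment.yaml, the current date and
# the manual CACHE_NUMBER; on a cache miss, the environment is rebuilt with mamba
# in the "Update environment" step below.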
on:
push:
branches:
- master
pull_request:
branches:
- master
schedule:
- cron: "0 5 * * TUE"
env:
CACHE_NUMBER: 1 # Change this value to manually reset the environment cache
jobs:
build:
strategy:
matrix:
include:
# Matrix required to handle caching with Mambaforge
- os: ubuntu-latest
label: ubuntu-latest
prefix: /usr/share/miniconda3/envs/pypsa-eur
- os: macos-latest
label: macos-latest
prefix: /Users/runner/miniconda3/envs/pypsa-eur
- os: windows-latest
label: windows-latest
prefix: C:\Miniconda3\envs\pypsa-eur
name: ${{ matrix.label }}
runs-on: ${{ matrix.os }}
defaults:
run:
shell: bash -l {0}
steps:
- uses: actions/checkout@v2
- name: Setup secrets
run: |
echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc
- name: Add solver to environment
run: |
echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml
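# glpk and ipopt are open-source solvers, so the CI runs need no commercial
# solver licence; the ipopt version pin mirrors the one previously used in
# .travis.yml.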
- name: Setup Mambaforge
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-variant: Mambaforge
miniforge-version: latest
activate-environment: pypsa-eur
use-mamba: true
- name: Set cache date
run: echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
- name: Create environment cache
uses: actions/cache@v2
id: cache
with:
path: ${{ matrix.prefix }}
key: ${{ matrix.label }}-conda-${{ hashFiles('envs/environment.yaml') }}-${{ env.DATE }}-${{ env.CACHE_NUMBER }}
- name: Update environment due to outdated or unavailable cache
run: mamba env update -n pypsa-eur -f envs/environment.yaml
if: steps.cache.outputs.cache-hit != 'true'
- name: Test snakemake workflow
run: |
conda activate pypsa-eur
conda list
cp test/config.test1.yaml config.yaml
snakemake --cores all solve_all_networks
rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
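# Note: the same smoke test can be reproduced locally from a checkout with the
# pypsa-eur environment active:
# cp test/config.test1.yaml config.yaml
# snakemake --cores all solve_all_networks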

18
.pre-commit-config.yaml

@@ -1,18 +0,0 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-merge-conflict
- id: check-added-large-files
args: ['--maxkb=2000']
- repo: https://github.com/fsfe/reuse-tool
rev: latest
hooks:
- id: reuse

.readthedocs.yml

@@ -4,5 +4,8 @@
version: 2
conda:
environment: envs/environment.docs.yaml
python:
version: 3.8
install:
- requirements: doc/requirements.txt
system_packages: true

19
.syncignore-receive Normal file

@@ -0,0 +1,19 @@
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
.snakemake
.git
.pytest_cache
.ipynb_checkpoints
.vscode
.DS_Store
__pycache__
*.pyc
*.pyo
*.ipynb
notebooks
doc
cutouts
data/bundle
*.nc

23
.syncignore-send Normal file

@@ -0,0 +1,23 @@
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
.snakemake
.git
.pytest_cache
.ipynb_checkpoints
.vscode
.DS_Store
__pycache__
*.pyc
*.pyo
*.ipynb
notebooks
benchmarks
logs
resources*
results
networks*
cutouts
data/bundle
doc

36
.travis.yml

@@ -1,36 +0,0 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
branches:
only:
- master
os:
- windows
- linux
- osx
language: bash
before_install:
# install conda
- wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh
- source conda4travis.sh
# install conda environment
- conda install -c conda-forge mamba
- mamba env create -f ./envs/environment.yaml
- conda activate pypsa-eur
# install open-source solver
- mamba install -c conda-forge glpk ipopt'<3.13.3'
# list packages for easier debugging
- conda list
script:
- cp ./test/config.test1.yaml ./config.yaml
- snakemake -j all solve_all_networks
- rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
# could repeat for more configurations in future

34
CITATION.cff Normal file

@@ -0,0 +1,34 @@
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
cff-version: 1.1.0
message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews."
title: "PyPSA-Eur: An open optimisation model of the European transmission system"
repository: https://github.com/pypsa/pypsa-eur
version: 0.4.0
license: MIT
journal: Energy Strategy Reviews
doi: 10.1016/j.esr.2018.08.012
authors:
- family-names: Hörsch
given-names: Jonas
orcid: https://orcid.org/0000-0001-9438-767X
- family-names: Brown
given-names: Tom
orcid: https://orcid.org/0000-0001-5898-1911
- family-names: Hofmann
given-names: Fabian
orcid: https://orcid.org/0000-0002-6604-5450
- family-names: Neumann
given-names: Fabian
orcid: https://orcid.org/0000-0001-8551-1480
- family-names: Frysztacki
given-names: Martha
orcid: https://orcid.org/0000-0002-0788-1328
- family-names: Hampp
given-names: Johannes
orcid: https://orcid.org/0000-0002-1776-116X
- family-names: Schlachtberger
given-names: David
orcid: https://orcid.org/0000-0002-8167-8213

625
LICENSES/GPL-3.0-or-later.txt

@@ -1,625 +0,0 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies of this license
document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for software and
other kinds of works.
The licenses for most software and other practical works are designed to take
away your freedom to share and change the works. By contrast, the GNU General
Public License is intended to guarantee your freedom to share and change all
versions of a program--to make sure it remains free software for all its users.
We, the Free Software Foundation, use the GNU General Public License for most
of our software; it applies also to any other work released this way by its
authors. You can apply it to your programs, too.
When we speak of free software, we are referring to freedom, not price. Our
General Public Licenses are designed to make sure that you have the freedom
to distribute copies of free software (and charge for them if you wish), that
you receive source code or can get it if you want it, that you can change
the software or use pieces of it in new free programs, and that you know you
can do these things.
To protect your rights, we need to prevent others from denying you these rights
or asking you to surrender the rights. Therefore, you have certain responsibilities
if you distribute copies of the software, or if you modify it: responsibilities
to respect the freedom of others.
For example, if you distribute copies of such a program, whether gratis or
for a fee, you must pass on to the recipients the same freedoms that you received.
You must make sure that they, too, receive or can get the source code. And
you must show them these terms so they know their rights.
Developers that use the GNU GPL protect your rights with two steps: (1) assert
copyright on the software, and (2) offer you this License giving you legal
permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains that
there is no warranty for this free software. For both users' and authors'
sake, the GPL requires that modified versions be marked as changed, so that
their problems will not be attributed erroneously to authors of previous versions.
Some devices are designed to deny users access to install or run modified
versions of the software inside them, although the manufacturer can do so.
This is fundamentally incompatible with the aim of protecting users' freedom
to change the software. The systematic pattern of such abuse occurs in the
area of products for individuals to use, which is precisely where it is most
unacceptable. Therefore, we have designed this version of the GPL to prohibit
the practice for those products. If such problems arise substantially in other
domains, we stand ready to extend this provision to those domains in future
versions of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents. States
should not allow patents to restrict development and use of software on general-purpose
computers, but in those that do, we wish to avoid the special danger that
patents applied to a free program could make it effectively proprietary. To
prevent this, the GPL assures that patents cannot be used to render the program
non-free.
The precise terms and conditions for copying, distribution and modification
follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of works,
such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this License.
Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals
or organizations.
To "modify" a work means to copy from or adapt all or part of the work in
a fashion requiring copyright permission, other than the making of an exact
copy. The resulting work is called a "modified version" of the earlier work
or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based on the
Program.
To "propagate" a work means to do anything with it that, without permission,
would make you directly or secondarily liable for infringement under applicable
copyright law, except executing it on a computer or modifying a private copy.
Propagation includes copying, distribution (with or without modification),
making available to the public, and in some countries other activities as
well.
To "convey" a work means any kind of propagation that enables other parties
to make or receive copies. Mere interaction with a user through a computer
network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices" to the
extent that it includes a convenient and prominently visible feature that
(1) displays an appropriate copyright notice, and (2) tells the user that
there is no warranty for the work (except to the extent that warranties are
provided), that licensees may convey the work under this License, and how
to view a copy of this License. If the interface presents a list of user commands
or options, such as a menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work for making
modifications to it. "Object code" means any non-source form of a work.
A "Standard Interface" means an interface that either is an official standard
defined by a recognized standards body, or, in the case of interfaces specified
for a particular programming language, one that is widely used among developers
working in that language.
The "System Libraries" of an executable work include anything, other than
the work as a whole, that (a) is included in the normal form of packaging
a Major Component, but which is not part of that Major Component, and (b)
serves only to enable use of the work with that Major Component, or to implement
a Standard Interface for which an implementation is available to the public
in source code form. A "Major Component", in this context, means a major essential
component (kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to produce
the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all the source
code needed to generate, install, and (for an executable work) run the object
code and to modify the work, including scripts to control those activities.
However, it does not include the work's System Libraries, or general-purpose
tools or generally available free programs which are used unmodified in performing
those activities but which are not part of the work. For example, Corresponding
Source includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically linked
subprograms that the work is specifically designed to require, such as by
intimate data communication or control flow between those subprograms and
other parts of the work.
The Corresponding Source need not include anything that users can regenerate
automatically from other parts of the Corresponding Source.
The Corresponding Source for a work in source code form is that same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of copyright
on the Program, and are irrevocable provided the stated conditions are met.
This License explicitly affirms your unlimited permission to run the unmodified
Program. The output from running a covered work is covered by this License
only if the output, given its content, constitutes a covered work. This License
acknowledges your rights of fair use or other equivalent, as provided by copyright
law.
You may make, run and propagate covered works that you do not convey, without
conditions so long as your license otherwise remains in force. You may convey
covered works to others for the sole purpose of having them make modifications
exclusively for you, or provide you with facilities for running those works,
provided that you comply with the terms of this License in conveying all material
for which you do not control copyright. Those thus making or running the covered
works for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of your copyrighted
material outside their relationship with you.
Conveying under any other circumstances is permitted solely under the conditions
stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological measure
under any applicable law fulfilling obligations under article 11 of the WIPO
copyright treaty adopted on 20 December 1996, or similar laws prohibiting
or restricting circumvention of such measures.
When you convey a covered work, you waive any legal power to forbid circumvention
of technological measures to the extent such circumvention is effected by
exercising rights under this License with respect to the covered work, and
you disclaim any intention to limit operation or modification of the work
as a means of enforcing, against the work's users, your or third parties'
legal rights to forbid circumvention of technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you receive
it, in any medium, provided that you conspicuously and appropriately publish
on each copy an appropriate copyright notice; keep intact all notices stating
that this License and any non-permissive terms added in accord with section
7 apply to the code; keep intact all notices of the absence of any warranty;
and give all recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey, and you
may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to produce
it from the Program, in the form of source code under the terms of section
4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified it, and
giving a relevant date.
b) The work must carry prominent notices stating that it is released under
this License and any conditions added under section 7. This requirement modifies
the requirement in section 4 to "keep intact all notices".
c) You must license the entire work, as a whole, under this License to anyone
who comes into possession of a copy. This License will therefore apply, along
with any applicable section 7 additional terms, to the whole of the work,
and all its parts, regardless of how they are packaged. This License gives
no permission to license the work in any other way, but it does not invalidate
such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display Appropriate
Legal Notices; however, if the Program has interactive interfaces that do
not display Appropriate Legal Notices, your work need not make them do so.
A compilation of a covered work with other separate and independent works,
which are not by their nature extensions of the covered work, and which are
not combined with it such as to form a larger program, in or on a volume of
a storage or distribution medium, is called an "aggregate" if the compilation
and its resulting copyright are not used to limit the access or legal rights
of the compilation's users beyond what the individual works permit. Inclusion
of a covered work in an aggregate does not cause this License to apply to
the other parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms of sections
4 and 5, provided that you also convey the machine-readable Corresponding
Source under the terms of this License, in one of these ways:
a) Convey the object code in, or embodied in, a physical product (including
a physical distribution medium), accompanied by the Corresponding Source fixed
on a durable physical medium customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product (including
a physical distribution medium), accompanied by a written offer, valid for
at least three years and valid for as long as you offer spare parts or customer
support for that product model, to give anyone who possesses the object code
either (1) a copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical medium customarily
used for software interchange, for a price no more than your reasonable cost
of physically performing this conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the written
offer to provide the Corresponding Source. This alternative is allowed only
occasionally and noncommercially, and only if you received the object code
with such an offer, in accord with subsection 6b.
d) Convey the object code by offering access from a designated place (gratis
or for a charge), and offer equivalent access to the Corresponding Source
in the same way through the same place at no further charge. You need not
require recipients to copy the Corresponding Source along with the object
code. If the place to copy the object code is a network server, the Corresponding
Source may be on a different server (operated by you or a third party) that
supports equivalent copying facilities, provided you maintain clear directions
next to the object code saying where to find the Corresponding Source. Regardless
of what server hosts the Corresponding Source, you remain obligated to ensure
that it is available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided you inform
other peers where the object code and Corresponding Source of the work are
being offered to the general public at no charge under subsection 6d.
A separable portion of the object code, whose source code is excluded from
the Corresponding Source as a System Library, need not be included in conveying
the object code work.
A "User Product" is either (1) a "consumer product", which means any tangible
personal property which is normally used for personal, family, or household
purposes, or (2) anything designed or sold for incorporation into a dwelling.
In determining whether a product is a consumer product, doubtful cases shall
be resolved in favor of coverage. For a particular product received by a particular
user, "normally used" refers to a typical or common use of that class of product,
regardless of the status of the particular user or of the way in which the
particular user actually uses, or expects or is expected to use, the product.
A product is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent the
only significant mode of use of the product.
"Installation Information" for a User Product means any methods, procedures,
authorization keys, or other information required to install and execute modified
versions of a covered work in that User Product from a modified version of
its Corresponding Source. The information must suffice to ensure that the
continued functioning of the modified object code is in no case prevented
or interfered with solely because modification has been made.
If you convey an object code work under this section in, or with, or specifically
for use in, a User Product, and the conveying occurs as part of a transaction
in which the right of possession and use of the User Product is transferred
to the recipient in perpetuity or for a fixed term (regardless of how the
transaction is characterized), the Corresponding Source conveyed under this
section must be accompanied by the Installation Information. But this requirement
does not apply if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has been installed
in ROM).
The requirement to provide Installation Information does not include a requirement
to continue to provide support service, warranty, or updates for a work that
has been modified or installed by the recipient, or for the User Product in
which it has been modified or installed. Access to a network may be denied
when the modification itself materially and adversely affects the operation
of the network or violates the rules and protocols for communication across
the network.
Corresponding Source conveyed, and Installation Information provided, in accord
with this section must be in a format that is publicly documented (and with
an implementation available to the public in source code form), and must require
no special password or key for unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this License
by making exceptions from one or more of its conditions. Additional permissions
that are applicable to the entire Program shall be treated as though they
were included in this License, to the extent that they are valid under applicable
law. If additional permissions apply only to part of the Program, that part
may be used separately under those permissions, but the entire Program remains
governed by this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option remove any
additional permissions from that copy, or from any part of it. (Additional
permissions may be written to require their own removal in certain cases when
you modify the work.) You may place additional permissions on material, added
by you to a covered work, for which you have or can give appropriate copyright
permission.
Notwithstanding any other provision of this License, for material you add
to a covered work, you may (if authorized by the copyright holders of that
material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the terms of
sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or author
attributions in that material or in the Appropriate Legal Notices displayed
by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or requiring
that modified versions of such material be marked in reasonable ways as different
from the original version; or
d) Limiting the use for publicity purposes of names of licensors or authors
of the material; or
e) Declining to grant rights under trademark law for use of some trade names,
trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that material by
anyone who conveys the material (or modified versions of it) with contractual
assumptions of liability to the recipient, for any liability that these contractual
assumptions directly impose on those licensors and authors.
All other non-permissive additional terms are considered "further restrictions"
within the meaning of section 10. If the Program as you received it, or any
part of it, contains a notice stating that it is governed by this License
along with a term that is a further restriction, you may remove that term.
If a license document contains a further restriction but permits relicensing
or conveying under this License, you may add to a covered work material governed
by the terms of that license document, provided that the further restriction
does not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you must place,
in the relevant source files, a statement of the additional terms that apply
to those files, or a notice indicating where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the form
of a separately written license, or stated as exceptions; the above requirements
apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly provided
under this License. Any attempt otherwise to propagate or modify it is void,
and will automatically terminate your rights under this License (including
any patent licenses granted under the third paragraph of section 11).
However, if you cease all violation of this License, then your license from
a particular copyright holder is reinstated (a) provisionally, unless and
until the copyright holder explicitly and finally terminates your license,
and (b) permanently, if the copyright holder fails to notify you of the violation
by some reasonable means prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is reinstated permanently
if the copyright holder notifies you of the violation by some reasonable means,
this is the first time you have received notice of violation of this License
(for any work) from that copyright holder, and you cure the violation prior
to 30 days after your receipt of the notice.
Termination of your rights under this section does not terminate the licenses
of parties who have received copies or rights from you under this License.
If your rights have been terminated and not permanently reinstated, you do
not qualify to receive new licenses for the same material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run a copy
of the Program. Ancillary propagation of a covered work occurring solely as
a consequence of using peer-to-peer transmission to receive a copy likewise
does not require acceptance. However, nothing other than this License grants
you permission to propagate or modify any covered work. These actions infringe
copyright if you do not accept this License. Therefore, by modifying or propagating
a covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically receives
a license from the original licensors, to run, modify and propagate that work,
subject to this License. You are not responsible for enforcing compliance
by third parties with this License.
An "entity transaction" is a transaction transferring control of an organization,
or substantially all assets of one, or subdividing an organization, or merging
organizations. If propagation of a covered work results from an entity transaction,
each party to that transaction who receives a copy of the work also receives
whatever licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the Corresponding
Source of the work from the predecessor in interest, if the predecessor has
it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the rights
granted or affirmed under this License. For example, you may not impose a
license fee, royalty, or other charge for exercise of rights granted under
this License, and you may not initiate litigation (including a cross-claim
or counterclaim in a lawsuit) alleging that any patent claim is infringed
by making, using, selling, offering for sale, or importing the Program or
any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this License
of the Program or a work on which the Program is based. The work thus licensed
is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims owned or controlled
by the contributor, whether already acquired or hereafter acquired, that would
be infringed by some manner, permitted by this License, of making, using,
or selling its contributor version, but do not include claims that would be
infringed only as a consequence of further modification of the contributor
version. For purposes of this definition, "control" includes the right to
grant patent sublicenses in a manner consistent with the requirements of this
License.
Each contributor grants you a non-exclusive, worldwide, royalty-free patent
license under the contributor's essential patent claims, to make, use, sell,
offer for sale, import and otherwise run, modify and propagate the contents
of its contributor version.
In the following three paragraphs, a "patent license" is any express agreement
or commitment, however denominated, not to enforce a patent (such as an express
permission to practice a patent or covenant not to sue for patent infringement).
To "grant" such a patent license to a party means to make such an agreement
or commitment not to enforce a patent against the party.
If you convey a covered work, knowingly relying on a patent license, and the
Corresponding Source of the work is not available for anyone to copy, free
of charge and under the terms of this License, through a publicly available
network server or other readily accessible means, then you must either (1)
cause the Corresponding Source to be so available, or (2) arrange to deprive
yourself of the benefit of the patent license for this particular work, or
(3) arrange, in a manner consistent with the requirements of this License,
to extend the patent license to downstream recipients. "Knowingly relying"
means you have actual knowledge that, but for the patent license, your conveying
the covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that country
that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or arrangement,
you convey, or propagate by procuring conveyance of, a covered work, and grant
a patent license to some of the parties receiving the covered work authorizing
them to use, propagate, modify or convey a specific copy of the covered work,
then the patent license you grant is automatically extended to all recipients
of the covered work and works based on it.
A patent license is "discriminatory" if it does not include within the scope
of its coverage, prohibits the exercise of, or is conditioned on the non-exercise
of one or more of the rights that are specifically granted under this License.
You may not convey a covered work if you are a party to an arrangement with
a third party that is in the business of distributing software, under which
you make payment to the third party based on the extent of your activity of
conveying the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory patent
license (a) in connection with copies of the covered work conveyed by you
(or copies made from those copies), or (b) primarily for and in connection
with specific products or compilations that contain the covered work, unless
you entered into that arrangement, or that patent license was granted, prior
to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting any implied
license or other defenses to infringement that may otherwise be available
to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or otherwise)
that contradict the conditions of this License, they do not excuse you from
the conditions of this License. If you cannot convey a covered work so as
to satisfy simultaneously your obligations under this License and any other
pertinent obligations, then as a consequence you may not convey it at all.
For example, if you agree to terms that obligate you to collect a royalty
for further conveying from those to whom you convey the Program, the only
way you could satisfy both those terms and this License would be to refrain
entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have permission to
link or combine any covered work with a work licensed under version 3 of the
GNU Affero General Public License into a single combined work, and to convey
the resulting work. The terms of this License will continue to apply to the
part which is the covered work, but the special requirements of the GNU Affero
General Public License, section 13, concerning interaction through a network
will apply to the combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of the
GNU General Public License from time to time. Such new versions will be similar
in spirit to the present version, but may differ in detail to address new
problems or concerns.
Each version is given a distinguishing version number. If the Program specifies
that a certain numbered version of the GNU General Public License "or any
later version" applies to it, you have the option of following the terms and
conditions either of that numbered version or of any later version published
by the Free Software Foundation. If the Program does not specify a version
number of the GNU General Public License, you may choose any version ever
published by the Free Software Foundation.
If the Program specifies that a proxy can decide which future versions of
the GNU General Public License can be used, that proxy's public statement
of acceptance of a version permanently authorizes you to choose that version
for the Program.
Later license versions may give you additional or different permissions. However,
no additional obligations are imposed on any author or copyright holder as
a result of your choosing to follow a later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE
LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER
EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM
PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL
ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM
AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO
USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE
PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided above cannot
be given local legal effect according to their terms, reviewing courts shall
apply local law that most closely approximates an absolute waiver of all civil
liability in connection with the Program, unless a warranty or assumption
of liability accompanies a copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest possible
use to the public, the best way to achieve this is to make it free software
which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest to attach
them to the start of each source file to most effectively state the exclusion
of warranty; and each file should have at least the "copyright" line and a
pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short notice like
this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it under certain
conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands might
be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary. For
more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library. If this is what you want to do, use the GNU Lesser General Public
License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

20
LICENSES/MIT.txt Normal file

@@ -0,0 +1,20 @@
MIT License
Copyright 2017-2021 The PyPSA-Eur Authors
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md

@@ -4,7 +4,7 @@ SPDX-License-Identifier: CC-BY-4.0
-->
![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases)
[![Build Status](https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master)](https://travis-ci.org/PyPSA/pypsa-eur)
[![Build Status](https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg)](https://github.com/PyPSA/pypsa-eur/actions)
[![Documentation](https://readthedocs.org/projects/pypsa-eur/badge/?version=latest)](https://pypsa-eur.readthedocs.io/en/latest/?badge=latest)
![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur)
[![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874)
@@ -58,7 +58,7 @@ The dataset consists of:
- Electrical demand time series from the
[OPSD project](https://open-power-system-data.org/).
- Renewable time series based on ERA5 and SARAH, assembled using the [atlite tool](https://github.com/FRESNA/atlite).
- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [vresutils library](https://github.com/FRESNA/vresutils) and the [glaes library](https://github.com/FZJ-IEK3-VSA/glaes).
- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [atlite library](https://github.com/PyPSA/atlite).
Already-built versions of the model can be found in the accompanying [Zenodo
repository](https://doi.org/10.5281/zenodo.3601881).

224
Snakefile

@@ -1,10 +1,13 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
from os.path import normpath, exists
from shutil import copyfile
from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
HTTP = HTTPRemoteProvider()
if not exists("config.yaml"):
copyfile("config.default.yaml", "config.yaml")
@@ -23,19 +26,19 @@ wildcard_constraints:
rule cluster_all_networks:
input: expand("networks/elec{year}_s{simpl}_{clusters}.nc", **config['scenario'])
input: expand("networks/elec{weather_year}_s{simpl}_{clusters}.nc", **config['scenario'])
rule extra_components_all_networks:
input: expand("networks/elec{year}_s{simpl}_{clusters}_ec.nc", **config['scenario'])
input: expand("networks/elec{weather_year}_s{simpl}_{clusters}_ec.nc", **config['scenario'])
rule prepare_all_networks:
input: expand("networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
input: expand("networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
rule solve_all_networks:
input: expand("results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
input: expand("results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc", **config['scenario'])
if config['enable'].get('prepare_links_p_nom', False):
@@ -43,7 +46,7 @@ if config['enable'].get('prepare_links_p_nom', False):
output: 'data/links_p_nom.csv'
log: 'logs/prepare_links_p_nom.log'
threads: 1
resources: mem=500
resources: mem_mb=500
script: 'scripts/prepare_links_p_nom.py'
@@ -66,7 +69,7 @@ if config['enable'].get('retrieve_databundle', True):
script: 'scripts/retrieve_databundle.py'
rule retrieve_load_data:
input: HTTP.remote("data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv", keep_local=True, static=True)
output: "data/load_raw.csv"
shell: "mv {input} {output}"
rule build_load_data:
input: "data/load_raw.csv"
output: "resources/load.csv"
log: "logs/build_load_data.log"
script: 'scripts/build_load_data.py'
@@ -79,7 +89,7 @@ rule build_powerplants:
output: "resources/powerplants.csv"
log: "logs/build_powerplants.log"
threads: 1
resources: mem=500
resources: mem_mb=500
script: "scripts/build_powerplants.py"
@@ -100,7 +110,7 @@ rule base_network:
log: "logs/base_network.log"
benchmark: "benchmarks/base_network"
threads: 1
resources: mem=500
resources: mem_mb=500
script: "scripts/base_network.py"
@@ -120,7 +130,7 @@ rule build_shapes:
nuts3_shapes='resources/nuts3_shapes.geojson'
log: "logs/build_shapes.log"
threads: 1
resources: mem=500
resources: mem_mb=500
script: "scripts/build_shapes.py"
@@ -134,32 +144,34 @@ rule build_bus_regions:
regions_offshore="resources/regions_offshore.geojson"
log: "logs/build_bus_regions.log"
threads: 1
resources: mem=1000
resources: mem_mb=1000
script: "scripts/build_bus_regions.py"
if config['enable'].get('build_cutout', False):
rule build_cutout:
output: directory("cutouts/{cutout}")
input:
regions_onshore="resources/regions_onshore.geojson",
regions_offshore="resources/regions_offshore.geojson"
output: "cutouts/{cutout}.nc"
log: "logs/build_cutout/{cutout}.log"
benchmark: "benchmarks/build_cutout_{cutout}"
threads: ATLITE_NPROCESSES
resources: mem=ATLITE_NPROCESSES * 1000
resources: mem_mb=ATLITE_NPROCESSES * 1000
script: "scripts/build_cutout.py"
if config['enable'].get('retrieve_cutout', True):
rule retrieve_cutout:
output: directory(expand("cutouts/{cutouts}", **config['atlite'])),
log: "logs/retrieve_cutout.log"
script: 'scripts/retrieve_cutout.py'
input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True, static=True)
output: "cutouts/{cutout}.nc"
shell: "mv {input} {output}"
if config['enable'].get('build_natura_raster', False):
rule build_natura_raster:
input:
natura="data/bundle/natura/Natura2000_end2015.shp",
cutouts=expand("cutouts/{cutouts}", **config['atlite'])
cutouts=expand("cutouts/{cutouts}.nc", **config['atlite'])
output: "resources/natura.tiff"
log: "logs/build_natura_raster.log"
script: "scripts/build_natura_raster.py"
@@ -167,9 +179,9 @@ if config['enable'].get('build_natura_raster', False):
if config['enable'].get('retrieve_natura_raster', True):
rule retrieve_natura_raster:
input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True, static=True)
output: "resources/natura.tiff"
log: "logs/retrieve_natura_raster.log"
script: 'scripts/retrieve_natura_raster.py'
shell: "mv {input} {output}"
ruleorder: build_hydro_profile > build_renewable_profiles
@@ -187,13 +199,13 @@ rule build_renewable_profiles:
regions=lambda w: ("resources/regions_onshore.geojson"
if w.technology in ('onwind', 'solar')
else "resources/regions_offshore.geojson"),
cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout']
cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout'] + ".nc"
output:
profile="resources/profile{year}_{technology}.nc",
log: "logs/build_renewable_profile{year}_{technology}.log"
benchmark: "benchmarks/build_renewable_profiles{year}_{technology}"
profile="resources/profile{weather_year}_{technology}.nc",
log: "logs/build_renewable_profile{weather_year}_{technology}.log"
benchmark: "benchmarks/build_renewable_profiles{weather_year}_{technology}"
threads: ATLITE_NPROCESSES
resources: mem=ATLITE_NPROCESSES * 5000
resources: mem_mb=ATLITE_NPROCESSES * 5000
script: "scripts/build_renewable_profiles.py"
@@ -202,9 +214,9 @@ if 'hydro' in config['renewable'].keys():
input:
country_shapes='resources/country_shapes.geojson',
eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
cutout="cutouts/" + config["renewable"]['hydro']['cutout']
output: 'resources/profile{year}_hydro.nc'
log: "logs/build_hydro_profile{year}.log"
cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + ".nc"
output: 'resources/profile{weather_year}_hydro.nc'
log: "logs/build_hydro_profile{weather_year}.log"
resources: mem=5000
script: 'scripts/build_hydro_profile.py'
@@ -219,73 +231,74 @@ rule add_electricity:
geth_hydro_capacities='data/geth2015_hydro_capacities.csv',
load='resources/load.csv',
nuts3_shapes='resources/nuts3_shapes.geojson',
**{f"profile_{tech}": "resources/profile{year}_" + f"{tech}.nc"
**{f"profile_{tech}": "resources/profile{weather_year}_" + f"{tech}.nc"
for tech in config['renewable']}
output: "networks/elec{year}.nc"
log: "logs/add_electricity{year}.log"
benchmark: "benchmarks/add_electricity{year}"
output: "networks/elec{weather_year}.nc"
log: "logs/add_electricity{weather_year}.log"
benchmark: "benchmarks/add_electricity{weather_year}"
threads: 1
resources: mem=3000
resources: mem_mb=5000
script: "scripts/add_electricity.py"
rule simplify_network:
input:
network='networks/elec{year}.nc',
network='networks/elec{weather_year}.nc',
tech_costs=COSTS,
regions_onshore="resources/regions_onshore.geojson",
regions_offshore="resources/regions_offshore.geojson"
output:
network='networks/elec{year}_s{simpl}.nc',
regions_onshore="resources/regions_onshore_elec{year}_s{simpl}.geojson",
regions_offshore="resources/regions_offshore_elec{year}_s{simpl}.geojson",
busmap='resources/busmap_elec{year}_s{simpl}.csv'
log: "logs/simplify_network/elec{year}_s{simpl}.log"
benchmark: "benchmarks/simplify_network/elec{year}_s{simpl}"
network='networks/elec{weather_year}_s{simpl}.nc',
regions_onshore="resources/regions_onshore_elec{weather_year}_s{simpl}.geojson",
regions_offshore="resources/regions_offshore_elec{weather_year}_s{simpl}.geojson",
busmap='resources/busmap_elec{weather_year}_s{simpl}.csv',
connection_costs='resources/connection_costs{weather_year}_s{simpl}.csv'
log: "logs/simplify_network/elec{weather_year}_s{simpl}.log"
benchmark: "benchmarks/simplify_network/elec{weather_year}_s{simpl}"
threads: 1
resources: mem=4000
resources: mem_mb=4000
script: "scripts/simplify_network.py"
rule cluster_network:
input:
network='networks/elec{year}_s{simpl}.nc',
regions_onshore="resources/regions_onshore_elec{year}_s{simpl}.geojson",
regions_offshore="resources/regions_offshore_elec{year}_s{simpl}.geojson",
busmap=ancient('resources/busmap_elec{year}_s{simpl}.csv'),
network='networks/elec{weather_year}_s{simpl}.nc',
regions_onshore="resources/regions_onshore_elec{weather_year}_s{simpl}.geojson",
regions_offshore="resources/regions_offshore_elec{weather_year}_s{simpl}.geojson",
busmap=ancient('resources/busmap_elec{weather_year}_s{simpl}.csv'),
tech_costs=COSTS
output:
network='networks/elec{year}_s{simpl}_{clusters}.nc',
regions_onshore="resources/regions_onshore_elec{year}_s{simpl}_{clusters}.geojson",
regions_offshore="resources/regions_offshore_elec{year}_s{simpl}_{clusters}.geojson",
busmap="resources/busmap_elec{year}_s{simpl}_{clusters}.csv",
linemap="resources/linemap_elec{year}_s{simpl}_{clusters}.csv"
log: "logs/cluster_network/elec{year}_s{simpl}_{clusters}.log"
benchmark: "benchmarks/cluster_network/elec{year}_s{simpl}_{clusters}"
network='networks/elec{weather_year}_s{simpl}_{clusters}.nc',
regions_onshore="resources/regions_onshore_elec{weather_year}_s{simpl}_{clusters}.geojson",
regions_offshore="resources/regions_offshore_elec{weather_year}_s{simpl}_{clusters}.geojson",
busmap="resources/busmap_elec{weather_year}_s{simpl}_{clusters}.csv",
linemap="resources/linemap_elec{weather_year}_s{simpl}_{clusters}.csv"
log: "logs/cluster_network/elec{weather_year}_s{simpl}_{clusters}.log"
benchmark: "benchmarks/cluster_network/elec{weather_year}_s{simpl}_{clusters}"
threads: 1
resources: mem=3000
resources: mem_mb=6000
script: "scripts/cluster_network.py"
rule add_extra_components:
input:
network='networks/elec{year}_s{simpl}_{clusters}.nc',
network='networks/elec{weather_year}_s{simpl}_{clusters}.nc',
tech_costs=COSTS,
output: 'networks/elec{year}_s{simpl}_{clusters}_ec.nc'
log: "logs/add_extra_components/elec{year}_s{simpl}_{clusters}.log"
benchmark: "benchmarks/add_extra_components/elec{year}_s{simpl}_{clusters}_ec"
output: 'networks/elec{weather_year}_s{simpl}_{clusters}_ec.nc'
log: "logs/add_extra_components/elec{weather_year}_s{simpl}_{clusters}.log"
benchmark: "benchmarks/add_extra_components/elec{weather_year}_s{simpl}_{clusters}_ec"
threads: 1
resources: mem=3000
resources: mem_mb=3000
script: "scripts/add_extra_components.py"
rule prepare_network:
input: 'networks/elec{year}_s{simpl}_{clusters}_ec.nc', tech_costs=COSTS
output: 'networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc'
log: "logs/prepare_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
benchmark: "benchmarks/prepare_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
input: 'networks/elec{weather_year}_s{simpl}_{clusters}_ec.nc', tech_costs=COSTS
output: 'networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc'
log: "logs/prepare_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
benchmark: "benchmarks/prepare_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
threads: 1
resources: mem=1000
resources: mem_mb=4000
script: "scripts/prepare_network.py"
@@ -308,43 +321,43 @@ def memory(w):
rule solve_network:
input: "networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
input: "networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
log:
solver=normpath("logs/solve_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
python="logs/solve_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
memory="logs/solve_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
benchmark: "benchmarks/solve_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
solver=normpath("logs/solve_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_solver.log"),
python="logs/solve_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_python.log",
memory="logs/solve_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
benchmark: "benchmarks/solve_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
threads: 4
resources: mem=memory
shadow: "shallow"
resources: mem_mb=memory
shadow: "minimal"
script: "scripts/solve_network.py"
rule solve_operations_network:
input:
unprepared="networks/elec{year}_s{simpl}_{clusters}_ec.nc",
optimized="results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc"
unprepared="networks/elec{weather_year}_s{simpl}_{clusters}_ec.nc",
optimized="results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc"
log:
solver=normpath("logs/solve_operations_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"),
python="logs/solve_operations_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
memory="logs/solve_operations_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
benchmark: "benchmarks/solve_operations_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
solver=normpath("logs/solve_operations_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_solver.log"),
python="logs/solve_operations_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_python.log",
memory="logs/solve_operations_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
benchmark: "benchmarks/solve_operations_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}"
threads: 4
resources: mem=(lambda w: 5000 + 372 * int(w.clusters))
shadow: "shallow"
resources: mem_mb=(lambda w: 5000 + 372 * int(w.clusters))
shadow: "minimal"
script: "scripts/solve_operations_network.py"
rule plot_network:
input:
network="results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
network="results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
tech_costs=COSTS
output:
only_map="results/plots/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}",
ext="results/plots/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}"
log: "logs/plot_network/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log"
only_map="results/plots/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}.{ext}",
ext="results/plots/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_ext.{ext}"
log: "logs/plot_network/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{attr}_{ext}.log"
script: "scripts/plot_network.py"
@ -357,8 +370,7 @@ def input_make_summary(w):
else:
ll = w.ll
return ([COSTS] +
expand("results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
network=w.network,
expand("results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
ll=ll,
**{k: config["scenario"][k] if getattr(w, k) == "all" else getattr(w, k)
for k in ["simpl", "clusters", "opts"]}))
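As an aside, snakemake's ``expand(...)`` fills each wildcard with every combination of the supplied values; a self-contained illustration with invented wildcard values:

.. code:: python

    from snakemake.io import expand

    # All values below are examples, not the shipped defaults.
    paths = expand(
        "results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
        weather_year="2013", simpl="", clusters=["37", "128"],
        ll="copt", opts="Co2L-3H",
    )
    # ['results/networks/elec2013_s_37_ec_lcopt_Co2L-3H.nc',
    #  'results/networks/elec2013_s_128_ec_lcopt_Co2L-3H.nc']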
@ -366,53 +378,27 @@ def input_make_summary(w):
rule make_summary:
input: input_make_summary
output: directory("results/summaries/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}")
log: "logs/make_summary/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
output: directory("results/summaries/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}")
log: "logs/make_summary/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.log",
script: "scripts/make_summary.py"
rule plot_summary:
input: "results/summaries/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}"
output: "results/plots/{year}/summary_{summary}_elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}"
log: "logs/plot_summary/{summary}_elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log"
input: "results/summaries/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}"
output: "results/plots/{weather_year}/summary_{summary}_elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}.{ext}"
log: "logs/plot_summary/{summary}_elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_{country}_{ext}.log"
script: "scripts/plot_summary.py"
def input_plot_p_nom_max(w):
return [("networks/elec{year}_s{simpl}{maybe_cluster}.nc"
return [("networks/elec{weather_year}_s{simpl}{maybe_cluster}.nc"
.format(maybe_cluster=('' if c == 'full' else ('_' + c)), **w))
for c in w.clusts.split(",")]
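For example, with ``clusts="full,37,128"`` the helper above resolves to one network file per clustering level, with ``full`` mapping to the unclustered network; a stand-alone illustration with invented wildcard values:

.. code:: python

    # Stand-in for the snakemake wildcards object (values are invented).
    wildcards = {"weather_year": "2013", "simpl": ""}

    paths = [("networks/elec{weather_year}_s{simpl}{maybe_cluster}.nc"
              .format(maybe_cluster=("" if c == "full" else "_" + c), **wildcards))
             for c in "full,37,128".split(",")]
    # ['networks/elec2013_s.nc', 'networks/elec2013_s_37.nc',
    #  'networks/elec2013_s_128.nc']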
rule plot_p_nom_max:
input: input_plot_p_nom_max
output: "results/plots/elec{year}_s{simpl}_cum_p_nom_max_{clusts}_{techs}_{country}.{ext}"
log: "logs/plot_p_nom_max/elec{year}_s{simpl}_{clusts}_{techs}_{country}_{ext}.log"
output: "results/plots/elec{weather_year}_s{simpl}_cum_p_nom_max_{clusts}_{techs}_{country}.{ext}"
log: "logs/plot_p_nom_max/elec{weather_year}_s{simpl}_{clusts}_{techs}_{country}_{ext}.log"
script: "scripts/plot_p_nom_max.py"
rule build_country_flh:
input:
base_network="networks/base.nc",
corine="data/bundle/corine/g250_clc06_V18_5.tif",
natura="resources/natura.tiff",
gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc"
if "max_depth" in config["renewable"][w.technology].keys()
else []),
country_shapes='resources/country_shapes.geojson',
offshore_shapes='resources/offshore_shapes.geojson',
pietzker="data/pietzker2014.xlsx",
regions=lambda w: ("resources/country_shapes.geojson"
if w.technology in ('onwind', 'solar')
else "resources/offshore_shapes.geojson"),
cutout=lambda w: "cutouts/" + config["renewable"][w.technology]['cutout']
output:
area="resources/country_flh_area_{technology}.csv",
aggregated="resources/country_flh_aggregated_{technology}.csv",
uncorrected="resources/country_flh_uncorrected_{technology}.csv",
plot="resources/country_flh_{technology}.pdf",
exclusion=directory("resources/country_exclusion_{technology}")
log: "logs/build_country_flh_{technology}.log"
resources: mem=10000
benchmark: "benchmarks/build_country_flh_{technology}"
script: "scripts/build_country_flh.py"

View File

@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: CC0-1.0
version: 0.3.0
version: 0.4.0
tutorial: false
logging:
@ -20,6 +20,10 @@ scenario:
countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK']
clustering:
simplify:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
snapshots:
start: "2013-01-01"
end: "2014-01-01"
@ -37,7 +41,7 @@ enable:
electricity:
voltages: [220., 300., 380.]
co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
co2base: 1.487e9
co2base: 1.487e+9
agg_p_nom_limits: data/agg_p_nom_minmax.csv
extendable_carriers:
@ -63,18 +67,28 @@ electricity:
atlite:
nprocesses: 4
cutouts:
# use 'base' to determine geographical bounds and time span from config
# base:
# module: era5
europe-2013-era5:
module: era5
xs: [-12., 35.]
ys: [72., 33.]
years: [2013, 2013]
module: era5 # in priority order
x: [-12., 35.]
y: [33., 72.]
dx: 0.3
dy: 0.3
time: ['2013', '2013']
europe-2013-sarah:
module: sarah
resolution: 0.2
xs: [-12., 42.]
ys: [65., 33.]
years: [2013, 2013]
module: [sarah, era5] # in priority order
x: [-12., 45.]
y: [33., 65.]
dx: 0.2
dy: 0.2
time: ['2013', '2013']
sarah_interpolate: false
sarah_dir:
features: [influx, temperature]
renewable:
onwind:
cutout: europe-2013-era5
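The renamed keys (``x``, ``y``, ``time`` instead of ``xs``, ``ys``, ``years``) follow the atlite v0.2 ``Cutout`` API. A minimal sketch of how the ``europe-2013-era5`` entry above could translate into a cutout build; the path and the stand-alone invocation are assumptions, not the actual ``build_cutout.py`` code:

.. code:: python

    import atlite

    # Bounds, resolution and time span taken from the config entry above.
    cutout = atlite.Cutout(
        path="cutouts/europe-2013-era5.nc",
        module="era5",
        x=slice(-12.0, 35.0),
        y=slice(33.0, 72.0),
        dx=0.3, dy=0.3,
        time=slice("2013", "2013"),
    )
    cutout.prepare(features=["influx", "temperature"])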
@ -98,8 +112,11 @@ renewable:
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
capacity_per_sqkm: 3
# correction_factor: 0.93
capacity_per_sqkm: 2
correction_factor: 0.8855
# proxy for wake losses
# from 10.1016/j.energy.2018.08.153
# until done more rigorously in #153
corine: [44, 255]
natura: true
max_depth: 50
@ -112,8 +129,11 @@ renewable:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
# ScholzPhd Tab 4.3.1: 10MW/km^2
capacity_per_sqkm: 3
# correction_factor: 0.93
capacity_per_sqkm: 2
correction_factor: 0.8855
# proxy for wake losses
# from 10.1016/j.energy.2018.08.153
# until done more rigorously in #153
corine: [44, 255]
natura: true
max_depth: 50
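The new ``correction_factor`` acts as a plain multiplier on the simulated availability time series; a short illustration of the wake-loss proxy with made-up capacity factors:

.. code:: python

    raw_cf = [0.45, 0.80, 0.10]                # hypothetical hourly capacity factors
    corrected = [cf * 0.8855 for cf in raw_cf]
    # [0.398475, 0.7084, 0.08855]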
@ -129,12 +149,14 @@ renewable:
slope: 35.
azimuth: 180.
capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2
# Determined by comparing uncorrected area-weighted full-load hours to those
# Correction factor determined by comparing uncorrected area-weighted full-load hours to those
# published in Supplementary Data to
# Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power
# sector: The economic potential of photovoltaics and concentrating solar
# power." Applied Energy 135 (2014): 704-720.
correction_factor: 0.854337
# This correction factor of 0.854337 may be in order if using reanalysis data.
# for discussion refer to https://github.com/PyPSA/pypsa-eur/pull/304
# correction_factor: 0.854337
corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
natura: true
@ -169,8 +191,7 @@ transformers:
type: ''
load:
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from
manual_adjustments: true # false
@ -220,8 +241,8 @@ solving:
# threads: 4
# lpmethod: 4 # barrier
# solutiontype: 2 # non basic solution, ie no crossover
# barrier_convergetol: 1.e-5
# feasopt_tolerance: 1.e-6
# barrier.convergetol: 1.e-5
# feasopt.tolerance: 1.e-6
plotting:
map:

View File

@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: CC0-1.0
version: 0.3.0
version: 0.4.0
tutorial: true
logging:
@ -20,6 +20,10 @@ scenario:
countries: ['DE']
clustering:
simplify:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
snapshots:
start: "2013-03-01"
end: "2013-04-01"
@ -55,16 +59,15 @@ electricity:
atlite:
nprocesses: 4
cutouts:
europe-2013-era5:
europe-2013-era5-tutorial:
module: era5
xs: [4., 15.]
ys: [56., 46.]
months: [3, 3]
years: [2013, 2013]
x: [4., 15.]
y: [46., 56.]
time: ["2013-03", "2013-03"]
renewable:
onwind:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: wind
turbine: Vestas_V112_3MW
@ -81,7 +84,7 @@ renewable:
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-ac:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
@ -93,7 +96,7 @@ renewable:
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-dc:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
@ -106,7 +109,7 @@ renewable:
potential: simple # or conservative
clip_p_max_pu: 1.e-2
solar:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: pv
panel: CSi
@ -114,12 +117,13 @@ renewable:
slope: 35.
azimuth: 180.
capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2
# Determined by comparing uncorrected area-weighted full-load hours to those
# Correction factor determined by comparing uncorrected area-weighted full-load hours to those
# published in Supplementary Data to
# Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power
# sector: The economic potential of photovoltaics and concentrating solar
# power." Applied Energy 135 (2014): 704-720.
correction_factor: 0.854337
# This correction factor of 0.854337 may be in order if using reanalysis data.
# correction_factor: 0.854337
corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
natura: true
@ -148,8 +152,7 @@ transformers:
type: ''
load:
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from
manual_adjustments: true # false

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# Makefile for Sphinx documentation
#

View File

@ -1,5 +1,5 @@
/* SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-License-Identifier: MIT
*/
.wy-side-nav-search {

View File

@ -108,6 +108,7 @@ Make sure that your instance is operating for the next steps.
- Option 1. Click on the Tools button and "Install Public Key into Server...". Somewhere in your folder structure there must be a public key. I found it with the following folder syntax on my local Windows computer -> :\Users\...\.ssh (there should be a PKK file).
- Option 2. Click on the Tools button and "Generate new key pair...". Save the private key in a folder you remember and add it to the "private key file" field in WinSCP. Upload the public key to the metadata of your instance.
- Click OK and save. Then click Login. If successful, WinSCP will open your local computer's folder structure on the left side and the folder structure of your VM on the right side. (If you followed Option 2 and it is not working initially: stop your instance, refresh the website, and reopen the WinSCP dialog. Afterwards your login should be successful.)
If you struggled with the above steps, you could also try `this video <https://www.youtube.com/watch?v=lYx1oQkEF0E>`_.
.. note::

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# -*- coding: utf-8 -*-
#
@ -76,7 +76,7 @@ author = u'Jonas Hoersch (KIT, FIAS), Fabian Hofmann (FIAS), David Schlachtberge
# The short X.Y version.
version = u'0.3'
# The full version, including alpha/beta/rc tags.
release = u'0.3.0'
release = u'0.4.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -157,16 +157,12 @@ html_theme_options = {
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# These folders are copied to the documentation's HTML output
html_static_path = ["_static"]
html_context = {
'css_files': [
'_static/theme_overrides.css', # override wide tables in RTD theme
],
}
# These paths are either relative to html_static_path
# or fully qualified paths (eg. https://...)
html_css_files = ["theme_overrides.css"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied

View File

@ -1,8 +1,9 @@
,Unit,Values,Description
nprocesses,--,int,"Number of parallel processes in cutout preparation"
cutouts,,,
-- {name},--,"Convention is to name cutouts like ``<region>-<year>-<source>`` (e.g. ``europe-2013-era5``).","Directory to write cutout data to. The user may specify multiple cutouts under configuration ``atlite: cutouts:``. Reference is used in configuration ``renewable: {technology}: cutout:``"
-- -- module,--,"One of {'era5','sarah'}","Source of the reanalysis weather dataset (e.g. `ERA5 <https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5>`_ or `SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_)"
-- -- xs,°,"Float interval within [-180, 180]","Range of longitudes to download weather data for."
-- -- ys,°,"Float interval within [-90, 90]","Range of latitudes to download weather data for."
-- -- years,--,"Integer interval within [1979,2018]","Range of years to download weather data for."
-- {name},--,"Convention is to name cutouts like ``<region>-<year>-<source>`` (e.g. ``europe-2013-era5``).","Name of the cutout netcdf file. The user may specify multiple cutouts under configuration ``atlite: cutouts:``. Reference is used in configuration ``renewable: {technology}: cutout:``. The cutout ``base`` may be used to automatically calculate temporal and spatial bounds of the network."
-- -- module,--,"Subset of {'era5','sarah'}","Source of the reanalysis weather dataset (e.g. `ERA5 <https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5>`_ or `SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_)"
-- -- x,°,"Float interval within [-180, 180]","Range of longitudes to download weather data for. If not defined, it defaults to the spatial bounds of all bus shapes."
-- -- y,°,"Float interval within [-90, 90]","Range of latitudes to download weather data for. If not defined, it defaults to the spatial bounds of all bus shapes."
-- -- time,,"Time interval within ['1979', '2018'] (with valid pandas date time strings)","Time span to download weather data for. If not defined, it defaults to the time interval spanned by the snapshots."
-- -- features,,"String or list of strings with valid cutout features ('influx', 'wind').","When freshly building a cutout, retrieve data only for those features. If not defined, it defaults to all available features."


View File

@ -0,0 +1,3 @@
,Unit,Values,Description
simplify,,,
-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"

View File

@ -1,19 +1,19 @@
,Unit,Values,Description,
voltages,kV,"Any subset of {220., 300., 380.}",Voltage levels to consider when,
co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide emissions,
co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard.,
,Unit,Values,Description
voltages,kV,"Any subset of {220., 300., 380.}",Voltage levels to consider when
co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide emissions
co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard.
agg_p_nom_limits,file,path,Reference to ``.csv`` file specifying per carrier generator nominal capacity constraints for individual countries if ``'CCL'`` is in ``{opts}`` wildcard. Defaults to ``data/agg_p_nom_minmax.csv``.
extendable_carriers,,,,
extendable_carriers,,,
-- Generator,--,"Any subset of {'OCGT','CCGT'}",Places extendable conventional power plants (OCGT and/or CCGT) where gas power plants are located today without capacity limits.
-- StorageUnit,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity.
-- Store,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity.
-- Link,--,Any subset of {'H2 pipeline'},Adds extendable links (H2 pipelines only) at every connection where there are lines or HVDC links without capacity limits and with zero initial capacity. Hydrogen pipelines require hydrogen storage to be modelled as ``Store``.
max_hours,,,,
max_hours,,,
-- battery,h,float,Maximum state of charge capacity of the battery in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
-- H2,h,float,Maximum state of charge capacity of the hydrogen storage in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
powerplants_filter,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database.,
custom_powerplants,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database.,
conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``.,
renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list <https://data.open-power-system-data.org/renewable_power_plants/>`_,
estimate_renewable_capacities_from_capacitiy_stats,,,,
"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype,
powerplants_filter,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database.
custom_powerplants,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database.
conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``.
renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list <https://data.open-power-system-data.org/renewable_power_plants/>`_
estimate_renewable_capacities_from_capacitiy_stats,,,
"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype


View File

@ -2,7 +2,7 @@
cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module must be ERA5.","Specifies the directory where the relevant weather data is stored."
resource,,,
-- method,--,"Must be 'wind'","A superordinate technology type."
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`_","Specifies the turbine type and its characteristic power curve."
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`__","Specifies the turbine type and its characteristic power curve."
capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine placement."
corine,--,"Any *realistic* subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement."
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."


View File

@ -2,7 +2,7 @@
cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module must be ERA5.","Specifies the directory where the relevant weather data is stored."
resource,,,
-- method,--,"Must be 'wind'","A superordinate technology type."
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`_","Specifies the turbine type and its characteristic power curve."
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`__","Specifies the turbine type and its characteristic power curve."
capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine placement."
corine,,,
-- grid_codes,--,"Any subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for wind turbine placement."


View File

@ -1,11 +1,11 @@
Trigger, Description, Definition, Status
``nH``; i.e. ``2H``-``6H``, Resample the time-resolution by averaging over every ``n`` snapshots, ``prepare_network``: `average_every_nhours() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L110>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L146>`_), In active use
``nH``; i.e. ``2H``-``6H``, Resample the time-resolution by averaging over every ``n`` snapshots, ``prepare_network``: `average_every_nhours() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L110>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L146>`__), In active use
``nSEG``; e.g. ``4380SEG``, "Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load.", ``prepare_network``: apply_time_segmentation(), In active use
``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended, an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L19>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L154>`_, In active use
``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L24>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L158>`_, In active use
``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended, an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L19>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L154>`__, In active use
``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L24>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L158>`__, In active use
``CCL``, Add minimum and maximum levels of generator nominal capacity per carrier for individual countries. These can be specified in the file linked at ``electricity: agg_p_nom_limits`` in the configuration. File defaults to ``data/agg_p_nom_minmax.csv``., ``solve_network``, In active use
``EQ``, "Require each country or node to on average produce a minimal share of its total consumption itself. Example: ``EQ0.5c`` demands each country to produce on average at least 50% of its consumption; ``EQ0.5`` demands each node to produce on average at least 50% of its consumption.", ``solve_network``, In active use
``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use
``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L66>`_, Untested
``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L73>`_, Untested
``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L66>`__, Untested
``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L73>`__, Untested
``carrier+{c|p}factor``, "Alter the capital cost (``c``) or installable potential (``p``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use


View File

@ -2,7 +2,7 @@
cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module can be ERA5 or SARAH-2.","Specifies the directory where the relevant weather data is stored, as specified in the ``atlite: cutouts:`` configuration. Both ``sarah`` and ``era5`` work."
resource,,,
-- method,--,"Must be 'pv'","A superordinate technology type."
-- panel,--,"One of {'Csi', 'CdTe', 'KANENA'} as defined in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/solarpanel>`_","Specifies the solar panel technology and its characteristic attributes."
-- panel,--,"One of {'Csi', 'CdTe', 'KANENA'} as defined in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/solarpanel>`__","Specifies the solar panel technology and its characteristic attributes."
-- orientation,,,
-- -- slope,°,"Realistically any angle in [0., 90.]","Specifies the tilt angle (or slope) of the solar panel. A slope of zero corresponds to the face of the panel aiming directly overhead. A positive tilt angle steers the panel towards the equator."
-- -- azimuth,°,"Any angle in [0., 360.]","Specifies the `azimuth <https://en.wikipedia.org/wiki/Azimuth>`_ orientation of the solar panel. South corresponds to 180.°."


View File

@ -1,3 +1,3 @@
,Unit,Values,Description
name,--,"One of {'gurobi', 'cplex', 'cbc', 'glpk', 'ipopt'}; potentially more possible","Solver to use for optimisation problems in the workflow; e.g. clustering and linear optimal power flow."
opts,--,"Parameter list for `Gurobi <https://www.gurobi.com/documentation/8.1/refman/parameters.html>`_ and `CPLEX <https://www.ibm.com/support/knowledgecenter/SSSA5P_12.5.1/ilog.odms.cplex.help/CPLEX/Parameters/topics/introListAlpha.html>`_","Solver specific parameter settings."
opts,--,"Parameter list for `Gurobi <https://www.gurobi.com/documentation/8.1/refman/parameters.html>`_ and `CPLEX <https://www.ibm.com/docs/en/icos/20.1.0?topic=cplex-topical-list-parameters>`_","Solver specific parameter settings."


View File

@ -14,4 +14,4 @@ enable,,,
-- retrieve_cutout,bool,"{true, false}","Switch to enable the retrieval of cutouts from zenodo with :mod:`retrieve_cutout`."
-- build_natura_raster,bool,"{true, false}","Switch to enable the creation of the raster ``natura.tiff`` via the rule :mod:`build_natura_raster`."
-- retrieve_natura_raster,bool,"{true, false}","Switch to enable the retrieval of ``natura.tiff`` from zenodo with :mod:`retrieve_natura_raster`."
-- custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/custom_busmap_elec{year}_s{simpl}_{clusters}.csv`` which should have the same format as ``resources/busmap_elec{year}_s{simpl}_{clusters}.csv``, i.e. the index should contain the buses of ``networks/elec_s{simpl}.nc``."
-- custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/custom_busmap_elec{weather_year}_s{simpl}_{clusters}.csv`` which should have the same format as ``resources/busmap_elec{weather_year}_s{simpl}_{clusters}.csv``, i.e. the index should contain the buses of ``networks/elec_s{simpl}.nc``."
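A hedged sketch of producing such a custom busmap file with pandas; every bus name and cluster label below is invented for illustration, and the exact column layout the rule expects should be checked against an existing ``resources/busmap_*.csv``:

.. code:: python

    import pandas as pd

    # Index: buses of networks/elec_s{simpl}.nc; values: target cluster names.
    busmap = pd.Series({"5231": "DE0 0", "5232": "DE0 0", "6104": "FR0 1"})
    busmap.index.name = "Bus"
    busmap.to_csv("data/custom_busmap_elec2013_s_37.csv")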


View File

@ -18,7 +18,8 @@ Top-level configuration
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 5-12,20,27-34
:lines: 5-12,20,31-38
.. csv-table::
:header-rows: 1
@ -42,7 +43,7 @@ facilitate running multiple scenarios through a single command
snakemake -j 1 solve_all_networks
For each wildcard, a **list of values** is provided. The rule ``solve_all_networks`` will trigger the rules for creating ``results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc`` for **all combinations** of the provided wildcard values as defined by Python's `itertools.product(...) <https://docs.python.org/2/library/itertools.html#itertools.product>`_ function that snakemake's `expand(...) function <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#targets>`_ uses.
For each wildcard, a **list of values** is provided. The rule ``solve_all_networks`` will trigger the rules for creating ``results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc`` for **all combinations** of the provided wildcard values as defined by Python's `itertools.product(...) <https://docs.python.org/2/library/itertools.html#itertools.product>`_ function that snakemake's `expand(...) function <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#targets>`_ uses.
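The combination logic boils down to a Cartesian product over the wildcard lists; a self-contained illustration (values invented, not the shipped defaults):

.. code:: python

    from itertools import product

    weather_year, simpl = ["2013"], [""]
    clusters, ll, opts = ["37", "128"], ["copt"], ["Co2L-3H"]

    for wy, s, c, l, o in product(weather_year, simpl, clusters, ll, opts):
        print(f"results/networks/elec{wy}_s{s}_{c}_ec_l{l}_{o}.nc")
    # results/networks/elec2013_s_37_ec_lcopt_Co2L-3H.nc
    # results/networks/elec2013_s_128_ec_lcopt_Co2L-3H.nc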
An exemplary dependency graph (starting from the simplification rules) then looks like this:
@ -50,7 +51,8 @@ An exemplary dependency graph (starting from the simplification rules) then look
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 14-18
:start-at: scenario:
:end-before: countries:
.. csv-table::
:header-rows: 1
@ -66,7 +68,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 22-25
:start-at: snapshots:
:end-before: enable:
.. csv-table::
:header-rows: 1
@ -80,7 +83,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 36-60
:start-at: electricity:
:end-before: atlite:
.. csv-table::
:header-rows: 1
@ -93,11 +97,14 @@ Specifies the temporal range to build an energy system model for as arguments to
.. _atlite_cf:
``atlite``
=============
==========
Define and specify the ``atlite.Cutout`` used for calculating renewable potentials and time-series. All options except for ``features`` are directly used as `cutout parameters <https://atlite.readthedocs.io/en/latest/ref_api.html#cutout>`_.
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 62-75
:start-at: atlite:
:end-before: renewable:
.. csv-table::
:header-rows: 1
@ -114,7 +121,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 77-94
:start-at: renewable:
:end-before: offwind-ac:
.. csv-table::
:header-rows: 1
@ -126,7 +134,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 77,95-107
:start-at: offwind-ac:
:end-before: offwind-dc:
.. csv-table::
:header-rows: 1
@ -138,7 +147,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 77,108-121
:start-at: offwind-dc:
:end-before: solar:
.. csv-table::
:header-rows: 1
@ -150,7 +160,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 77,122-141
:start-at: solar:
:end-before: hydro:
.. csv-table::
:header-rows: 1
@ -162,7 +173,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 77,142-147
:start-at: hydro:
:end-before: lines:
.. csv-table::
:header-rows: 1
@ -176,7 +188,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 149-157
:start-at: lines:
:end-before: links:
.. csv-table::
:header-rows: 1
@ -190,7 +203,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 159-163
:start-at: links:
:end-before: transformers:
.. csv-table::
:header-rows: 1
@ -204,7 +218,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 165-168
:start-at: transformers:
:end-before: load:
.. csv-table::
:header-rows: 1
@ -218,7 +233,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 170-176
:start-at: load:
:end-before: costs:
.. csv-table::
:header-rows: 1
@ -232,7 +248,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 178-190
:start-after: scaling_factor:
:end-before: solving:
.. csv-table::
:header-rows: 1
@ -253,7 +270,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 192-202
:start-at: solving:
:end-before: solver:
.. csv-table::
:header-rows: 1
@ -265,7 +283,8 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 192,203-219
:start-at: solver:
:end-before: plotting:
.. csv-table::
:header-rows: 1
@ -279,7 +298,7 @@ Specifies the temporal range to build an energy system model for as arguments to
.. literalinclude:: ../config.default.yaml
:language: yaml
:lines: 221-299
:start-at: plotting:
.. csv-table::
:header-rows: 1

View File

@ -9,8 +9,8 @@ PyPSA-Eur: An Open Optimisation Model of the European Transmission System
.. image:: https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases
:alt: GitHub release (latest by date including pre-releases)
.. image:: https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master
:target: https://travis-ci.org/PyPSA/pypsa-eur
.. image:: https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg
:target: https://github.com/PyPSA/pypsa-eur/actions
.. image:: https://readthedocs.org/projects/pypsa-eur/badge/?version=latest
:target: https://pypsa-eur.readthedocs.io/en/latest/?badge=latest
@ -49,7 +49,18 @@ The restriction to freely available and open data encourages the open exchange o
PyPSA-Eur is designed to be imported into the open toolbox `PyPSA <https://www.pypsa.org>`_ for which `documentation <https://pypsa.org/doc>`_ is available as well.
This project is maintained by the `Energy System Modelling group <https://www.iai.kit.edu/english/2338.php>`_ at the `Institute for Automation and Applied Informatics <https://www.iai.kit.edu/english/index.php>`_ at the `Karlsruhe Institute of Technology <http://www.kit.edu/english/index.php>`_. The group is funded by the `Helmholtz Association <https://www.helmholtz.de/en/>`_ until 2024. Previous versions were developed by the `Renewable Energy Group <https://fias.uni-frankfurt.de/physics/schramm/renewable-energy-system-and-network-analysis/>`_ at `FIAS <https://fias.uni-frankfurt.de/>`_ to carry out simulations for the `CoNDyNet project <http://condynet.de/>`_, financed by the `German Federal Ministry for Education and Research (BMBF) <https://www.bmbf.de/en/index.html>`_ as part of the `Stromnetze Research Initiative <http://forschung-stromnetze.info/projekte/grundlagen-und-konzepte-fuer-effiziente-dezentrale-stromnetze/>`_.
This project is currently maintained by the `Department of Digital
Transformation in Energy Systems <https://www.ensys.tu-berlin.de>`_ at the
`Technische Universität Berlin <https://www.tu.berlin>`_. Previous versions were
developed within the `IAI <http://www.iai.kit.edu>`_ at the `Karlsruhe Institute of
Technology (KIT) <http://www.kit.edu/english/index.php>`_ and by the `Renewable
Energy Group
<https://fias.uni-frankfurt.de/physics/schramm/renewable-energy-system-and-network-analysis/>`_
at `FIAS <https://fias.uni-frankfurt.de/>`_ to carry out simulations for the
`CoNDyNet project <http://condynet.de/>`_, financed by the `German Federal
Ministry for Education and Research (BMBF) <https://www.bmbf.de/en/index.html>`_
as part of the `Stromnetze Research Initiative
<http://forschung-stromnetze.info/projekte/grundlagen-und-konzepte-fuer-effiziente-dezentrale-stromnetze/>`_.
A version of the model that adds building heating, transport and industry sectors to the model,
as well as gas networks, is currently being developed in the `PyPSA-Eur-Sec repository <https://github.com/pypsa/pypsa-eur-sec>`_.
@ -141,7 +152,7 @@ If you are (relatively) new to energy system modelling and optimisation
and plan to use PyPSA-Eur, the following resources are *one way* to get started
in addition to reading this documentation.
- Documentation of `PyPSA <https://pypsa.readthedocs.io>`_, the package for
- Documentation of `PyPSA <https://pypsa.readthedocs.io>`__, the package for
simulating and optimising modern power systems which PyPSA-Eur uses under the hood.
- Course on `Energy System Modelling <https://nworbmot.org/courses/esm-2019/>`_,
Karlsruhe Institute of Technology (KIT), `Dr. Tom Brown <https://nworbmot.org>`_
@ -196,7 +207,7 @@ Licence
PyPSA-Eur work is released under multiple licenses:
* All original source code is licensed as free software under `GPL-3.0-or-later <LICENSES/GPL-3.0-or-later.txt>`_.
* All original source code is licensed as free software under `MIT <LICENSES/MIT.txt>`_.
* The documentation is licensed under `CC-BY-4.0 <LICENSES/CC-BY-4.0.txt>`_.
* Configuration files are mostly licensed under `CC0-1.0 <LICENSES/CC0-1.0.txt>`_.
* Data files are licensed under `CC-BY-4.0 <LICENSES/CC-BY-4.0.txt>`_.

View File

@ -1,5 +1,5 @@
REM SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
REM SPDX-License-Identifier: GPL-3.0-or-later
REM SPDX-License-Identifier: MIT
@ECHO OFF

View File

@ -9,50 +9,6 @@ Plotting and Summary
.. warning:: The corresponding code is currently under revision and has only minimal documentation.
.. _flh:
Rule ``build_country_flh``
=============================
.. graphviz::
:align: center
digraph snakemake_dag {
graph [bgcolor=white,
margin=0,
size="8,5"
];
node [fontname=sans,
fontsize=10,
penwidth=2,
shape=box,
style=rounded
];
edge [color=grey,
penwidth=2
];
0 [color="0.31 0.6 0.85",
fillcolor=gray,
label=build_country_flh,
style=filled];
1 [color="0.06 0.6 0.85",
label=base_network];
1 -> 0;
2 [color="0.42 0.6 0.85",
label=build_natura_raster];
2 -> 0;
3 [color="0.58 0.6 0.85",
label=build_shapes];
3 -> 0;
4 [color="0.14 0.6 0.85",
label=build_cutout];
4 -> 0;
}
|
.. automodule:: build_country_flh
.. _plot_potentials:
Rule ``plot_p_nom_max``
@ -128,8 +84,8 @@ Rule ``make_summary``
Rule ``plot_summary``
========================
.. graphviz::
:align: center
.. .. graphviz::
.. :align: center

View File

@ -45,7 +45,6 @@ together into a detailed PyPSA network stored in ``networks/elec.nc``.
preparation/prepare_links_p_nom
preparation/base_network
preparation/build_bus_regions
preparation/build_natura_raster
preparation/build_powerplants
preparation/build_renewable_profiles
preparation/build_hydro_profile

View File

@ -21,9 +21,59 @@ Rule ``retrieve_databundle``
Rule ``retrieve_cutout``
------------------------
.. automodule:: retrieve_cutout
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517949.svg
:target: https://doi.org/10.5281/zenodo.3517949
Cutouts are spatio-temporal subsets of the European weather data from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ reanalysis dataset and the `CMSAF SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_ solar surface radiation dataset for the year 2013.
They have been prepared by and are for use with the `atlite <https://github.com/PyPSA/atlite>`_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo <https://doi.org/10.5281/zenodo.3517949>`__ through the rule ``retrieve_cutout``.
The :ref:`tutorial` uses a smaller cutout than required for the full model (30 MB), which is also automatically downloaded.
.. note::
To download cutouts yourself from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ you need to `set up the CDS API <https://cds.climate.copernicus.eu/api-how-to>`_.
**Relevant Settings**
.. code:: yaml

    tutorial:
    enable:
        build_cutout:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
:ref:`toplevel_cf`
**Outputs**
- ``cutouts/{cutout}``: weather data from either the `ERA5 <https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5>`_ reanalysis weather dataset or `SARAH-2 <https://wui.cmsaf.eu/safira/action/viewProduktSearch>`_ satellite-based historic weather data.
.. seealso::
For details see :mod:`build_cutout` and read the `atlite documentation <https://atlite.readthedocs.io>`_.
Rule ``retrieve_natura_raster``
-------------------------------
.. automodule:: retrieve_natura_raster
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.4706686.svg
:target: https://doi.org/10.5281/zenodo.4706686
This rule, as a substitute for :mod:`build_natura_raster`, downloads an already rasterized version (`natura.tiff <https://zenodo.org/record/4706686/files/natura.tiff>`_) of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times. The file is placed into the ``resources`` sub-directory.
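The file is a plain GeoTIFF and can be inspected with ``rasterio``, which is part of the environment. A minimal sketch, assuming the file has been downloaded to the output path listed below:

.. code:: python

    import rasterio

    # Open the rasterized Natura 2000 natural protection areas.
    with rasterio.open("resources/natura.tiff") as src:
        print(src.crs)      # coordinate reference system
        print(src.bounds)   # spatial extent of the raster
        mask = src.read(1)  # first band; nonzero cells mark protected areas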
**Relevant Settings**
.. code:: yaml

    enable:
        build_natura_raster:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
:ref:`toplevel_cf`
**Outputs**
- ``resources/natura.tiff``: Rasterized version of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times.
.. seealso::
For details see :mod:`build_natura_raster`.

View File

@ -1,5 +1,5 @@
..
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
SPDX-FileCopyrightText: 2019-2021 The PyPSA-Eur Authors
SPDX-License-Identifier: CC-BY-4.0
@ -11,13 +11,140 @@ Release Notes
Upcoming Release
================
* Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used.
* Add an efficiency factor of 88.55% to offshore wind capacity factors
as a proxy for wake losses. More rigorous modelling is `planned <https://github.com/PyPSA/pypsa-eur/issues/153>`_
[`#277 <https://github.com/PyPSA/pypsa-eur/pull/277>`_].
* The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm
to a more conservative estimate of 2 MW/sqkm [`#280 <https://github.com/PyPSA/pypsa-eur/pull/280>`_].
* Following discussion in `#285 <https://github.com/PyPSA/pypsa-eur/issues/285>`_ we have disabled the
correction factor for solar PV capacity factors by default while satellite data is used.
A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used.
* Resource definitions for memory usage now follow the `Snakemake standard resource definition <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources>`_ ``mem_mb`` rather than ``mem``.
* Network building is made deterministic by supplying a fixed random state to network clustering routines.
PyPSA-Eur 0.4.0 (22th September 2021)
=====================================
**New Features and Changes**
* With this release, we change the license from copyleft GPLv3 to the more
liberal MIT license with the consent of all contributors
[`#276 <https://github.com/PyPSA/pypsa-eur/pull/276>`_].
* Switch to the new major ``atlite`` release v0.2. The version upgrade comes
along with significant speed up for the rule ``build_renewable_profiles.py``
(~factor 2). A lot of the code which calculated the land-use availability is now
outsourced and does not rely on ``glaes``, ``geokit`` anymore. This facilitates
the environment building and version compatibility of ``gdal``, ``libgdal`` with
other packages [`#224 <https://github.com/PyPSA/pypsa-eur/pull/224>`_].
* Implemented changes to ``n.snapshot_weightings`` in new PyPSA version v0.18
(cf. `PyPSA/PyPSA/#227 <https://github.com/PyPSA/PyPSA/pull/227>`_)
[`#259 <https://github.com/PyPSA/pypsa-eur/pull/259>`_].
* Add option to pre-aggregate nodes without power injections (positive or
negative, i.e. generation or demand) to electrically closest nodes or neighbors
in ``simplify_network``. Defaults to ``False``. This affects nodes that are not
substations or have no offshore connection.
* In :mod:`simplify_network`, bus columns with no longer correct entries are
removed (symbol, tags, under_construction, substation_lv, substation_off)
[`#219 <https://github.com/PyPSA/pypsa-eur/pull/219>`_]
* Add option to include marginal costs of links representing fuel cells,
electrolysis, and battery inverters
[`#232 <https://github.com/PyPSA/pypsa-eur/pull/232>`_].
* The rule and script ``build_country_flh`` are removed as they are no longer
used or maintained.
* The connection cost of generators in :mod:`simplify_network` are now reported
in ``resources/connection_costs_s{simpl}.csv``
[`#261 <https://github.com/PyPSA/pypsa-eur/pull/261>`_].
* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to
``cutouts/europe-2013-era5-tutorial.nc`` to accommodate tutorial and productive
cutouts side-by-side.
* The flag ``keep_all_available_areas`` in the configuration for renewable
potentials was deprecated and now defaults to ``True``.
* Update dependencies in ``envs/environment.yaml``
[`#257 <https://github.com/PyPSA/pypsa-eur/pull/257>`_]
* Continuous integration testing switches to Github Actions from Travis CI
[`#252 <https://github.com/PyPSA/pypsa-eur/pull/252>`_].
* Documentation on readthedocs.io is now built with ``pip`` only and no longer
requires ``conda`` [`#267 <https://github.com/PyPSA/pypsa-eur/pull/267>`_].
* Use ``Citation.cff`` [`#273 <https://github.com/PyPSA/pypsa-eur/pull/273>`_].
**Bugs and Compatibility**
* Support for PyPSA v0.18 [`#268 <https://github.com/PyPSA/pypsa-eur/pull/268>`_].
* Minimum Python version set to ``3.8``.
* Removed ``six`` dependency [`#245 <https://github.com/PyPSA/pypsa-eur/pull/245>`_].
* Update :mod:`plot_network` and :mod:`make_summary` rules to latest PyPSA
versions [`#270 <https://github.com/PyPSA/pypsa-eur/pull/270>`_].
* Keep converter links to store components when using the ``ATK``
wildcard and only remove DC links [`#214 <https://github.com/PyPSA/pypsa-eur/pull/214>`_].
* Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq
(from 3.1e9 t CO2-eq). The new value represents emissions related to the
electricity sector for EU+UK+Balkan. The old value was too high and used when
the emissions wildcard in ``{opts}`` was used
[`#233 <https://github.com/PyPSA/pypsa-eur/pull/233>`_].
* Add escape in :mod:`base_network` if all TYNDP links are already
contained in the network
[`#246 <https://github.com/PyPSA/pypsa-eur/pull/246>`_].
* In :mod:`solve_operations_network` the optimised capacities are now
fixed for all extendable links, not only HVDC links
[`#244 <https://github.com/PyPSA/pypsa-eur/pull/244>`_].
* The ``focus_weights`` are now also considered when pre-clustering in
the :mod:`simplify_network` rule
[`#241 <https://github.com/PyPSA/pypsa-eur/pull/241>`_].
* Fix a bug in :mod:`build_renewable_profile` where offshore wind profiles could
no longer be created [`#249 <https://github.com/PyPSA/pypsa-eur/pull/249>`_].
* Lower expansion limit of extendable carriers is now set to the
existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the
upper limit (``p_nom_max``) is now the maximum of the installed capacity
(``p_nom``) and the previous estimate based on land availability (``p_nom_max``)
[`#260 <https://github.com/PyPSA/pypsa-eur/pull/260>`_].
* Solving an operations network now includes optimized store capacities
as well. Before, only lines, links, generators and storage units were considered
[`#269 <https://github.com/PyPSA/pypsa-eur/pull/269>`_].
* With ``load_shedding: true`` in the solving options of ``config.yaml``
load shedding generators are only added at the AC buses, excluding buses for H2
and battery stores [`#269 <https://github.com/PyPSA/pypsa-eur/pull/269>`_].
* Delete duplicated capital costs at battery discharge link
[`#240 <https://github.com/PyPSA/pypsa-eur/pull/240>`_].
* Propagate the solver log file name to the solver. Previously, the
PyPSA network solving functions were not told about the solver logfile specified
in the Snakemake file [`#247 <https://github.com/PyPSA/pypsa-eur/pull/247>`_]
PyPSA-Eur 0.3.0 (7th December 2020)
==================================
===================================
**New Features**
@ -40,6 +167,7 @@ Using the ``{opts}`` wildcard for scenarios:
uses the `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package
[`#186 <https://github.com/PyPSA/pypsa-eur/pull/186>`_].
More OPSD integration:
* Add renewable power plants from `OPSD <https://data.open-power-system-data.org/renewable_power_plants/2020-08-25>`_ to the network for specified technologies.
@ -197,7 +325,7 @@ Release Process
* Tag a release on Github via ``git tag v0.x.x``, ``git push``, ``git push --tags``. Include release notes in the tag message.
* Upload code to `zenodo code repository <https://doi.org/10.5281/zenodo.3520874>`_ with `GNU GPL 3.0 <https://www.gnu.org/licenses/gpl-3.0.en.html>`_ license.
* Upload code to `zenodo code repository <https://doi.org/10.5281/zenodo.3520874>`_ with `MIT license <https://opensource.org/licenses/MIT>`_.
* Create pre-built networks for ``config.default.yaml`` by running ``snakemake -j 1 extra_components_all_networks``.

21
doc/requirements.txt Normal file
View File

@ -0,0 +1,21 @@
# SPDX-FileCopyrightText: : 2019-2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: CC0-1.0
sphinx
sphinx_rtd_theme
pypsa
vresutils>=0.3.1
powerplantmatching>=0.4.8
atlite>=0.2.2
dask<=2021.3.1
# cartopy
scikit-learn
pycountry
pyyaml
seaborn
memory_profiler
tables
descartes

View File

@ -14,7 +14,7 @@ Tutorial
<iframe width="832" height="468" src="https://www.youtube.com/embed/mAwhQnNRIvs" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
Before getting started with **PyPSA-Eur** it makes sense to be familiar
with its general modelling framework `PyPSA <https://pypsa.readthedocs.io>`_.
with its general modelling framework `PyPSA <https://pypsa.readthedocs.io>`__.
Running the tutorial requires limited computational resources compared to the full model,
which allows the user to explore most of its functionalities on a local machine.
@ -53,41 +53,41 @@ Likewise, the example's temporal scope can be restricted (e.g. to a single month
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 22-25
:lines: 24-27
It is also possible to allow less or more carbon-dioxide emissions. Here, we limit the emissions of Germany to 100 Megatonnes per year.
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 36,38
:lines: 38,40
PyPSA-Eur also includes a database of existing conventional powerplants.
We can select which types of powerplants we would like to include with fixed capacities:
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 36,52
:lines: 38,54
To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data.
It is advisable to adapt the required range of coordinates to the selection of countries.
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 54-62
:lines: 56-63
We can also decide which weather data source should be used to calculate potentials and capacity factor time-series for each carrier.
For example, we may want to use the ERA-5 dataset for solar and not the default SARAH-2 dataset.
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 64,107-108
:lines: 65,108-109
Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely
on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available).
.. literalinclude:: ../config.tutorial.yaml
:language: yaml
:lines: 170,180-181
:lines: 171,181-182
.. note::
@ -119,8 +119,8 @@ orders ``snakemake`` to run the script ``solve_network`` that produces the solve
.. code::
rule solve_network:
input: "networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
input: "networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
output: "results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc"
[...]
script: "scripts/solve_network.py"

View File

@ -126,12 +126,11 @@ The ``{technology}`` wildcard
The ``{technology}`` wildcard specifies for which renewable energy technology to produce availability time
series and potentials using the rule :mod:`build_renewable_profiles`.
It can take the values ``onwind``, ``offwind-ac``, ``offwind-dc``, and ``solar`` but **not** ``hydro``
(since hydroelectric plant profiles are created by a different rule.
(since hydroelectric plant profiles are created by a different rule).
The wildcard can moreover be used to create technology specific figures and summaries.
For instance ``{technology}`` can be used to plot regionally disaggregated potentials
with the rule :mod:`plot_p_nom_max` or to summarize a particular technology's
full load hours in various countries with the rule :mod:`build_country_flh`.
with the rule :mod:`plot_p_nom_max`.
.. _attr:

View File

@ -1,45 +0,0 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
name: pypsa-eur-docs
channels:
- conda-forge
dependencies:
- python<=3.7
- pip
- pypsa>=0.17.1
- atlite=0.0.3
- pre-commit
# Dependencies of the workflow itself
- scikit-learn
- pycountry
- seaborn
- memory_profiler
- yaml
- pytables
- powerplantmatching>=0.4.3
# Second order dependencies which should really be deps of atlite
- xarray
- progressbar2
- pyyaml>=5.1.0
# GIS dependencies have to come all from conda-forge
- cartopy
- fiona
- proj
- pyshp
- geopandas
- rasterio
- shapely
- libgdal
- pip:
- vresutils==0.3.1
- git+https://github.com/PyPSA/glaes.git#egg=glaes
- git+https://github.com/PyPSA/geokit.git#egg=geokit
- cdsapi
- sphinx
- sphinx_rtd_theme

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: CC0-1.0
name: pypsa-eur
channels:
@ -11,231 +11,300 @@ channels:
dependencies:
- _libgcc_mutex=0.1
- affine=2.3.0
- appdirs=1.4.3
- atlite=0.0.3
- attrs=19.3.0
- backcall=0.1.0
- beautifulsoup4=4.9.1
- blas=1.0
- blosc=1.16.3
- bokeh=2.0.2
- alsa-lib=1.2.3
- amply=0.1.4
- appdirs=1.4.4
- atlite=0.2.5
- attrs=21.2.0
- backcall=0.2.0
- backports=1.0
- backports.functools_lru_cache=1.6.4
- beautifulsoup4=4.10.0
- blosc=1.21.0
- bokeh=2.3.3
- boost-cpp=1.74.0
- bottleneck=1.3.2
- bzip2=1.0.8
- ca-certificates=2020.1.1
- cairo=1.14.12
- cartopy=0.17.0
- certifi=2020.4.5.1
- cffi=1.14.0
- c-ares=1.17.2
- ca-certificates=2021.5.30
- cairo=1.16.0
- cartopy=0.19.0.post1
- cdsapi=0.5.1
- certifi=2021.5.30
- cffi=1.14.6
- cfitsio=3.470
- cftime=1.1.2
- chardet=3.0.4
- cftime=1.5.0
- chardet=4.0.0
- charset-normalizer=2.0.0
- click=7.1.2
- click-plugins=1.1.1
- cligj=0.5.0
- cloudpickle=1.4.1
- cligj=0.7.2
- cloudpickle=2.0.0
- coincbc=2.10.5
- configargparse=1.1
- cryptography=2.9.2
- curl=7.67.0
- colorama=0.4.4
- conda=4.10.3
- conda-package-handling=1.7.3
- configargparse=1.5.2
- connection_pool=0.0.3
- country_converter=0.7.3
- cryptography=3.4.7
- curl=7.79.0
- cycler=0.10.0
- cytoolz=0.10.1
- dask=2.17.2
- dask-core=2.17.2
- cytoolz=0.11.0
- dask=2021.3.1
- dask-core=2021.3.1
- datrie=0.8.2
- dbus=1.13.14
- dbus=1.13.6
- decorator=4.4.2
- distributed=2.17.0
- docutils=0.16
- entsoe-py=0.2.10
- expat=2.2.6
- fiona=1.8.11
- fontconfig=2.13.0
- freetype=2.9.1
- freexl=1.0.5
- fsspec=0.7.4
- gdal=3.0.2
- geographiclib=1.50
- geopandas=0.6.1
- geopy=1.22.0
- geos=3.8.0
- geotiff=1.5.1
- giflib=5.1.4
- gitdb=4.0.2
- gitpython=3.1.1
- glib=2.63.1
- gst-plugins-base=1.14.0
- gstreamer=1.14.0
- gurobi=9.0.2
- hdf4=4.2.13
- hdf5=1.10.4
- deprecation=2.1.0
- descartes=1.1.0
- distributed=2021.4.1
- distro=1.5.0
- docutils=0.17.1
- entsoe-py=0.3.7
- et_xmlfile=1.0.1
- expat=2.4.1
- filelock=3.0.12
- fiona=1.8.18
- fontconfig=2.13.1
- freetype=2.10.4
- freexl=1.0.6
- fsspec=2021.8.1
- gdal=3.2.1
- geographiclib=1.52
- geopandas=0.9.0
- geopandas-base=0.9.0
- geopy=2.2.0
- geos=3.9.1
- geotiff=1.6.0
- gettext=0.19.8.1
- giflib=5.2.1
- gitdb=4.0.7
- gitpython=3.1.23
- glib=2.68.4
- glib-tools=2.68.4
- graphite2=1.3.13
- gst-plugins-base=1.18.5
- gstreamer=1.18.5
- harfbuzz=2.9.1
- hdf4=4.2.15
- hdf5=1.10.6
- heapdict=1.0.1
- icu=58.2
- idna=2.9
- importlib-metadata=1.6.0
- importlib_metadata=1.6.0
- intel-openmp=2020.1
- ipopt=3.13.2
- ipython=7.13.0
- icu=68.1
- idna=3.1
- importlib-metadata=4.8.1
- iniconfig=1.1.1
- ipython=7.27.0
- ipython_genutils=0.2.0
- jedi=0.17.0
- jinja2=2.11.2
- joblib=0.15.1
- jpeg=9b
- json-c=0.13.1
- jdcal=1.4.1
- jedi=0.18.0
- jinja2=3.0.1
- joblib=1.0.1
- jpeg=9d
- json-c=0.15
- jsonschema=3.2.0
- jupyter_core=4.6.3
- kealib=1.4.7
- kiwisolver=1.2.0
- krb5=1.16.4
- ld_impl_linux-64=2.33.1
- libblas=3.8.0
- libboost=1.67.0
- libcblas=3.8.0
- libcurl=7.67.0
- libdap4=3.19.1
- libedit=3.1.20181209
- libffi=3.3
- libgcc-ng=9.1.0
- libgdal=3.0.2
- libgfortran-ng=7.3.0
- jupyter_core=4.8.1
- kealib=1.4.14
- kiwisolver=1.3.2
- krb5=1.19.2
- lcms2=2.12
- ld_impl_linux-64=2.36.1
- libarchive=3.5.1
- libblas=3.9.0
- libcblas=3.9.0
- libclang=11.1.0
- libcurl=7.79.0
- libdap4=3.20.6
- libedit=3.1.20191231
- libev=4.33
- libevent=2.1.10
- libffi=3.4.2
- libgcc-ng=11.2.0
- libgdal=3.2.1
- libgfortran-ng=11.2.0
- libgfortran5=11.2.0
- libglib=2.68.4
- libgomp=11.2.0
- libiconv=1.16
- libkml=1.3.0
- liblapack=3.8.0
- libnetcdf=4.6.1
- liblapack=3.9.0
- libllvm11=11.1.0
- libnetcdf=4.7.4
- libnghttp2=1.43.0
- libogg=1.3.4
- libopenblas=0.3.17
- libopus=1.3.1
- libpng=1.6.37
- libpq=11.5
- libpq=13.3
- librttopo=1.1.0
- libsolv=0.7.19
- libspatialindex=1.9.3
- libspatialite=4.3.0a
- libssh2=1.9.0
- libstdcxx-ng=9.1.0
- libtiff=4.1.0
- libuuid=1.0.3
- libspatialite=5.0.1
- libssh2=1.10.0
- libstdcxx-ng=11.2.0
- libtiff=4.2.0
- libuuid=2.32.1
- libvorbis=1.3.7
- libwebp-base=1.2.1
- libxcb=1.13
- libxml2=2.9.9
- libxkbcommon=1.0.3
- libxml2=2.9.12
- libxslt=1.1.33
- locket=0.2.0
- lxml=4.5.0
- lz4-c=1.8.1.2
- lxml=4.6.3
- lz4-c=1.9.3
- lzo=2.10
- markupsafe=1.1.1
- matplotlib=3.1.3
- matplotlib-base=3.1.3
- memory_profiler=0.55.0
- metis=5.1.0
- mkl=2020.1
- mkl-service=2.3.0
- mkl_fft=1.0.15
- mkl_random=1.1.1
- mock=4.0.2
- more-itertools=8.3.0
- msgpack-python=1.0.0
- mamba=0.15.3
- mapclassify=2.4.3
- markupsafe=2.0.1
- matplotlib=3.4.3
- matplotlib-base=3.4.3
- matplotlib-inline=0.1.3
- memory_profiler=0.58.0
- mock=4.0.3
- more-itertools=8.10.0
- msgpack-python=1.0.2
- munch=2.5.0
- nbformat=5.0.6
- mysql-common=8.0.25
- mysql-libs=8.0.25
- nbformat=5.1.3
- ncurses=6.2
- netcdf4=1.4.2
- networkx=2.4
- nose=1.3.7
- numexpr=2.7.1
- numpy=1.18.1
- numpy-base=1.18.1
- netcdf4=1.5.6
- networkx=2.6.3
- nspr=4.30
- nss=3.69
- numexpr=2.7.3
- numpy=1.21.2
- olefile=0.46
- openjpeg=2.3.0
- openssl=1.1.1g
- owslib=0.19.2
- packaging=20.3
- pandas=1.0.3
- parso=0.7.0
- partd=1.1.0
- pcre=8.43
- openjdk=11.0.9.1
- openjpeg=2.4.0
- openpyxl=3.0.8
- openssl=1.1.1l
- packaging=21.0
- pandas=1.2.5
- parso=0.8.2
- partd=1.2.0
- patsy=0.5.1
- pcre=8.45
- pexpect=4.8.0
- pickleshare=0.7.5
- pillow=7.1.2
- pip=20.0.2
- pixman=0.38.0
- pluggy=0.13.1
- pillow=8.2.0
- pip=21.2.4
- pixman=0.40.0
- pluggy=1.0.0
- ply=3.11
- poppler=0.65.0
- poppler-data=0.4.9
- postgresql=11.5
- powerplantmatching=0.4.5
- progressbar2=3.37.1
- proj=6.2.1
- prompt-toolkit=3.0.5
- prompt_toolkit=3.0.5
- psutil=5.7.0
- ptyprocess=0.6.0
- py=1.8.1
- pycountry=19.8.18
- poppler=0.89.0
- poppler-data=0.4.11
- postgresql=13.3
- powerplantmatching=0.4.8
- progressbar2=3.53.1
- proj=7.2.0
- prompt-toolkit=3.0.20
- psutil=5.8.0
- pthread-stubs=0.4
- ptyprocess=0.7.0
- pulp=2.5.0
- py=1.10.0
- pycosat=0.6.3
- pycountry=20.7.3
- pycparser=2.20
- pyepsg=0.4.0
- pygments=2.6.1
- pykdtree=1.3.1
- pyomo=5.6.9
- pyopenssl=19.1.0
- pygments=2.10.0
- pyomo=6.1.2
- pyopenssl=20.0.1
- pyparsing=2.4.7
- pyproj=2.6.1.post1
- pypsa=0.17.0
- pyqt=5.9.2
- pyrsistent=0.16.0
- pyshp=2.1.0
- pyproj=3.1.0
- pypsa=0.18.0
- pyqt=5.12.3
- pyqt-impl=5.12.3
- pyqt5-sip=4.19.18
- pyqtchart=5.12
- pyqtwebengine=5.12.1
- pyrsistent=0.17.3
- pyshp=2.1.3
- pysocks=1.7.1
- pytables=3.6.1
- pytest=5.4.2
- pytest-runner=5.2
- python=3.7.7
- python-dateutil=2.8.1
- python-utils=2.3.0
- python_abi=3.7
- pytz=2020.1
- pyutilib=5.8.0
- pyyaml=5.3.1
- qt=5.9.7
- rasterio=1.1.0
- pytest=6.2.5
- python=3.9.7
- python-dateutil=2.8.2
- python-utils=2.5.6
- python_abi=3.9
- pytz=2021.1
- pyyaml=5.4.1
- qt=5.12.9
- rasterio=1.2.6
- ratelimiter=1.2.0
- readline=8.0
- requests=2.23.0
- rtree=0.9.4
- scikit-learn=0.22.1
- scipy=1.4.1
- seaborn=0.10.1
- setuptools=47.1.1
- shapely=1.7.0
- sip=4.19.8
- six=1.15.0
- smmap=3.0.2
- snakemake-minimal=5.19.2
- snappy=1.1.7
- readline=8.1
- reproc=14.2.3
- reproc-cpp=14.2.3
- requests=2.26.0
- rtree=0.9.7
- ruamel_yaml=0.15.80
- scikit-learn=0.24.2
- scipy=1.7.1
- seaborn=0.11.2
- seaborn-base=0.11.2
- setuptools=58.0.4
- setuptools-scm=6.3.2
- setuptools_scm=6.3.2
- shapely=1.7.1
- six=1.16.0
- smart_open=5.2.1
- smmap=3.0.5
- snakemake-minimal=6.8.0
- snuggs=1.4.7
- sortedcontainers=2.1.0
- sortedcontainers=2.4.0
- soupsieve=2.0.1
- sqlite=3.31.1
- tbb=2018.0.5
- tblib=1.6.0
- tiledb=1.6.3
- tk=8.6.8
- toolz=0.10.0
- toposort=1.5
- tornado=6.0.4
- traitlets=4.3.3
- typing_extensions=3.7.4.1
- tzcode=2020a
- urllib3=1.25.8
- wcwidth=0.1.9
- wheel=0.34.2
- sqlite=3.36.0
- statsmodels=0.12.2
- stopit=1.1.2
- tabula-py=2.2.0
- tabulate=0.8.9
- tblib=1.7.0
- threadpoolctl=2.2.0
- tiledb=2.2.9
- tk=8.6.11
- toml=0.10.2
- tomli=1.2.1
- toolz=0.11.1
- toposort=1.6
- tornado=6.1
- tqdm=4.62.3
- traitlets=5.1.0
- typing_extensions=3.10.0.2
- tzcode=2021a
- tzdata=2021a
- urllib3=1.26.6
- wcwidth=0.2.5
- wheel=0.37.0
- wrapt=1.12.1
- xarray=0.15.1
- xerces-c=3.2.2
- xlrd=1.2.0
- xarray=0.19.0
- xerces-c=3.2.3
- xlrd=2.0.1
- xorg-fixesproto=5.0
- xorg-inputproto=2.3.2
- xorg-kbproto=1.0.7
- xorg-libice=1.0.10
- xorg-libsm=1.2.3
- xorg-libx11=1.7.2
- xorg-libxau=1.0.9
- xorg-libxdmcp=1.1.3
- xorg-libxext=1.3.4
- xorg-libxfixes=5.0.3
- xorg-libxi=1.7.10
- xorg-libxrender=0.9.10
- xorg-libxtst=1.2.3
- xorg-recordproto=1.14.2
- xorg-renderproto=0.11.1
- xorg-xextproto=7.3.0
- xorg-xproto=7.0.31
- xz=5.2.5
- yaml=0.1.7
- zict=2.0.0
- zipp=3.1.0
- zipp=3.5.0
- zlib=1.2.11
- zstd=1.3.7
- zstd=1.4.9
- pip:
- cdsapi==0.2.7
- countrycode==0.2
- descartes==1.1.0
- geokit==1.1.2
- glaes==1.1.2
- tqdm==4.46.1
- sklearn==0.0
- tsam==1.1.1
- vresutils==0.3.1

View File

@ -1,25 +1,22 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
name: pypsa-eur
channels:
- conda-forge
- bioconda
- http://conda.anaconda.org/gurobi
dependencies:
- python
- python>=3.8
- pip
- pre-commit
- mamba # esp for windows build
- pypsa>=0.17.1
- atlite=0.0.3
- pypsa>=0.18.1
- atlite>=0.2.5
- dask
# Dependencies of the workflow itself
- xlrd
- openpyxl
- scikit-learn
- pycountry
- seaborn
- snakemake-minimal
@ -28,34 +25,34 @@ dependencies:
- pytables
- lxml
- powerplantmatching>=0.4.8
- numpy<=1.19.0 # otherwise macos fails
# Second order dependencies which should really be deps of atlite
- numpy
- pandas<1.3
- geopandas
- xarray
- netcdf4
- bottleneck
- toolz
- dask
- networkx
- scipy
- shapely
- progressbar2
- pyyaml>=5.1.0
- pyomo
- matplotlib
- proj
# Keep in conda environment when calling ipython
- ipython
# GIS dependencies:
- cartopy
- fiona
- proj
- pyshp
- geopandas
- rasterio
- shapely
- libgdal<=3.0.4
- descartes
- rasterio
# PyPSA-Eur-Sec Dependencies
- geopy
- tqdm
- pytz
- country_converter
- tabula-py
- pip:
- vresutils==0.3.1
- vresutils>=0.3.1
- tsam>=1.1.0
- git+https://github.com/PyPSA/glaes.git#egg=glaes
- git+https://github.com/PyPSA/geokit.git#egg=geokit
- cdsapi

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
import pandas as pd
from pathlib import Path
@ -119,12 +119,20 @@ def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
# bus_carrier = n.storage_units.bus.map(n.buses.carrier)
# n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks"
Nyears = n.snapshot_weightings.sum() / 8760.
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(Nyears, tech_costs, config['costs'], config['electricity'])
update_transmission_costs(n, costs)
return n
def update_p_nom_max(n):
# if extendable carriers (solar/onwind/...) have capacity >= 0,
# e.g. existing assets from the OPSD project are included in the network,
# the installed capacity might exceed the expansion limit.
# Hence, we update the assumptions.
n.generators.p_nom_max = n.generators[['p_nom_min', 'p_nom_max']].max(1)
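# Editorial illustration (not part of the module): the row-wise maximum
# above guarantees p_nom_max >= p_nom_min, e.g.
#
#   >>> import pandas as pd
#   >>> pd.DataFrame({'p_nom_min': [50., 0.], 'p_nom_max': [30., 100.]}).max(1)
#   0     50.0
#   1    100.0
#   dtype: float64
#
# so a 30 MW land-availability limit is raised to the 50 MW already installed.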
def aggregate_p_nom(n):
return pd.concat([
n.generators.groupby("carrier").p_nom_opt.sum(),
@ -156,7 +164,6 @@ def aggregate_p_curtailed(n):
])
def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
from six import iterkeys, itervalues
components = dict(Link=("p_nom", "p0"),
Generator=("p_nom", "p"),
@ -167,8 +174,8 @@ def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
costs = {}
for c, (p_nom, p_attr) in zip(
n.iterate_components(iterkeys(components), skip_empty=False),
itervalues(components)
n.iterate_components(components.keys(), skip_empty=False),
components.values()
):
if c.df.empty: continue
if not existing_only: p_nom += "_opt"
@ -233,7 +240,7 @@ def mock_snakemake(rulename, **wildcards):
if os.path.exists(p):
snakefile = p
break
workflow = sm.Workflow(snakefile)
workflow = sm.Workflow(snakefile, overwrite_configfiles=[])
workflow.include(snakefile)
workflow.global_resources = {}
rule = workflow.get_rule(rulename)

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, update_p_nom_max
import pypsa
import pandas as pd
@ -96,7 +96,6 @@ import powerplantmatching as pm
from powerplantmatching.export import map_country_bus
from vresutils.costdata import annuity
from vresutils.load import timeseries_opsd
from vresutils import transfer as vtransfer
idx = pd.IndexSlice
@ -119,12 +118,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers):
n.import_components_from_dataframe(emissions, 'Carrier')
def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
if tech_costs is None:
tech_costs = snakemake.input.tech_costs
if config is None:
config = snakemake.config['costs']
def load_costs(tech_costs, config, elec_config, Nyears=1.):
# set all asset costs and other parameters
costs = pd.read_csv(tech_costs, index_col=list(range(3))).sort_index()
@ -170,8 +164,6 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
marginal_cost=0.,
co2_emissions=0.))
if elec_config is None:
elec_config = snakemake.config['electricity']
max_hours = elec_config['max_hours']
costs.loc["battery"] = \
costs_for_storage(costs.loc["battery storage"], costs.loc["battery inverter"],
@ -189,9 +181,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
return costs
def load_powerplants(ppl_fn=None):
if ppl_fn is None:
ppl_fn = snakemake.input.powerplants
def load_powerplants(ppl_fn):
carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass',
'ccgt, thermal': 'CCGT', 'hard coal': 'coal'}
return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'})
@ -200,9 +190,10 @@ def load_powerplants(ppl_fn=None):
.replace({'carrier': carrier_dict}))
def attach_load(n):
def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.):
substation_lv_i = n.buses.index[n.buses['substation_lv']]
regions = (gpd.read_file(snakemake.input.regions).set_index('name')
regions = (gpd.read_file(regions).set_index('name')
.reindex(substation_lv_i))
# ------------- TO MERGE --------------
@ -230,15 +221,14 @@ def attach_load(n):
# Convert to naive UTC (has to be explicit since pandas 0.24)
opsd_load.index = opsd_load.index.tz_localize(None)
opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True)
.filter(items=snakemake.config['countries']))
opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True)
.filter(items=countries))
scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)
logger.info(f"Load data scaled with scalling factor {scaling}.")
opsd_load *= scaling
# ------------- TO MERGE --------------
nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index')
nuts3 = gpd.read_file(nuts3_shapes).set_index('index')
def upsample(cntry, group):
l = opsd_load[cntry]
@ -255,7 +245,6 @@ def attach_load(n):
# relative factors 0.6 and 0.4 have been determined from a linear
# regression on the country to continent load data
# (refer to vresutils.load._upsampling_weights)
factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
return pd.DataFrame(factors.values * l.values[:,np.newaxis],
index=l.index, columns=factors.index)
@ -267,6 +256,9 @@ def attach_load(n):
def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False):
# TODO: line length factor of lines is applied to lines and links.
# Separate the function to distinguish.
n.lines['capital_cost'] = (n.lines['length'] * length_factor *
costs.at['HVAC overhead', 'capital_cost'])
@ -291,18 +283,20 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal
n.links.loc[dc_b, 'capital_cost'] = costs
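# Editorial example of the length-based cost formula above: a 100 km line
# with a length factor of 1.25 and an assumed HVAC overhead cost of
# 400 EUR/MW/km yields 100 * 1.25 * 400 = 50,000 EUR/MW of capital cost;
# the assignment above applies the analogous figure to DC links.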
def attach_wind_and_solar(n, costs):
for tech in snakemake.config['renewable']:
def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_factor=1):
# TODO: rename tech -> carrier, technologies -> carriers
for tech in technologies:
if tech == 'hydro': continue
n.add("Carrier", name=tech)
with xr.open_dataset(getattr(snakemake.input, 'profile_' + tech)) as ds:
with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds:
if ds.indexes['bus'].empty: continue
suptech = tech.split('-', 2)[0]
if suptech == 'offwind':
underwater_fraction = ds['underwater_fraction'].to_pandas()
connection_cost = (snakemake.config['lines']['length_factor'] *
connection_cost = (line_length_factor *
ds['average_distance'].to_pandas() *
(underwater_fraction *
costs.at[tech + '-connection-submarine', 'capital_cost'] +
@ -328,8 +322,7 @@ def attach_wind_and_solar(n, costs):
p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas())
def attach_conventional_generators(n, costs, ppl):
carriers = snakemake.config['electricity']['conventional_carriers']
def attach_conventional_generators(n, costs, ppl, carriers):
_add_missing_carriers_from_costs(n, costs, carriers)
@ -350,10 +343,7 @@ def attach_conventional_generators(n, costs, ppl):
logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.')
def attach_hydro(n, costs, ppl):
if 'hydro' not in snakemake.config['renewable']: return
c = snakemake.config['renewable']['hydro']
carriers = c.get('carriers', ['ror', 'PHS', 'hydro'])
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config):
_add_missing_carriers_from_costs(n, costs, carriers)
@ -369,11 +359,11 @@ def attach_hydro(n, costs, ppl):
if not inflow_idx.empty:
dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed)
with xr.open_dataarray(snakemake.input.profile_hydro) as inflow:
with xr.open_dataarray(profile_hydro) as inflow:
inflow_countries = pd.Index(country[inflow_idx])
missing_c = (inflow_countries.unique()
.difference(inflow.indexes['countries']))
assert missing_c.empty, (f"'{snakemake.input.profile_hydro}' is missing "
assert missing_c.empty, (f"'{profile_hydro}' is missing "
f"inflow time-series for at least one country: {', '.join(missing_c)}")
inflow_t = (inflow.sel(countries=inflow_countries)
@ -398,7 +388,8 @@ def attach_hydro(n, costs, ppl):
if 'PHS' in carriers and not phs.empty:
# fill missing max hours to config value and
# assume no natural inflow due to lack of data
phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}})
max_hours = config.get('PHS_max_hours', 6)
phs = phs.replace({'max_hours': {0: max_hours}})
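# Editorial sketch: the nested-dict form of DataFrame.replace above only
# touches the 'max_hours' column, mapping the missing-data marker 0 to the
# configured default, e.g. with max_hours = 6:
#
#   >>> import pandas as pd
#   >>> pd.DataFrame({'max_hours': [0., 40.]}).replace({'max_hours': {0: 6}})
#      max_hours
#   0        6.0
#   1       40.0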
n.madd('StorageUnit', phs.index,
carrier='PHS',
bus=phs['bus'],
@ -410,8 +401,11 @@ def attach_hydro(n, costs, ppl):
cyclic_state_of_charge=True)
if 'hydro' in carriers and not hydro.empty:
hydro_max_hours = c.get('hydro_max_hours')
hydro_stats = pd.read_csv(snakemake.input.hydro_capacities,
hydro_max_hours = config.get('hydro_max_hours')
assert hydro_max_hours is not None, "No path for hydro capacities given."
hydro_stats = pd.read_csv(hydro_capacities,
comment="#", na_values='-', index_col=0)
e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6
e_installed = hydro.eval('p_nom * max_hours').groupby(hydro.country).sum()
@ -439,8 +433,7 @@ def attach_hydro(n, costs, ppl):
bus=hydro['bus'],
p_nom=hydro['p_nom'],
max_hours=hydro_max_hours,
capital_cost=(costs.at['hydro', 'capital_cost']
if c.get('hydro_capital_cost') else 0.),
capital_cost=costs.at['hydro', 'capital_cost'],
marginal_cost=costs.at['hydro', 'marginal_cost'],
p_max_pu=1., # dispatch
p_min_pu=0., # store
@ -450,9 +443,7 @@ def attach_hydro(n, costs, ppl):
inflow=inflow_t.loc[:, hydro.index])
def attach_extendable_generators(n, costs, ppl):
elec_opts = snakemake.config['electricity']
carriers = pd.Index(elec_opts['extendable_carriers']['Generator'])
def attach_extendable_generators(n, costs, ppl, carriers):
_add_missing_carriers_from_costs(n, costs, carriers)
@ -500,12 +491,11 @@ def attach_extendable_generators(n, costs, ppl):
def attach_OPSD_renewables(n):
def attach_OPSD_renewables(n, techs):
available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB']
tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'}
countries = set(available) & set(n.buses.country)
techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
tech_map = {k: v for k, v in tech_map.items() if v in techs}
if not tech_map:
@ -529,13 +519,11 @@ def attach_OPSD_renewables(n):
caps = caps / gens_per_bus.reindex(caps.index, fill_value=1)
n.generators.p_nom.update(gens.bus.map(caps).dropna())
n.generators.p_nom_min.update(gens.bus.map(caps).dropna())
def estimate_renewable_capacities(n, tech_map=None):
if tech_map is None:
tech_map = (snakemake.config['electricity']
.get('estimate_renewable_capacities_from_capacity_stats', {}))
def estimate_renewable_capacities(n, tech_map):
if len(tech_map) == 0: return
@ -564,10 +552,10 @@ def estimate_renewable_capacities(n, tech_map=None):
.groupby(n.generators.bus.map(n.buses.country))
.transform(lambda s: normed(s) * tech_capacities.at[s.name])
.where(lambda s: s>0.1, 0.)) # only capacities above 100kW
n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom']
def add_nice_carrier_names(n, config=None):
if config is None: config = snakemake.config
def add_nice_carrier_names(n, config):
carrier_i = n.carriers.index
nice_names = (pd.Series(config['plotting']['nice_names'])
.reindex(carrier_i).fillna(carrier_i.to_series().str.title()))
@ -575,11 +563,9 @@ def add_nice_carrier_names(n, config=None):
colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i)
if colors.isna().any():
missing_i = list(colors.index[colors.isna()])
logger.warning(f'tech_colors for carriers {missing_i} not defined '
'in config.')
logger.warning(f'tech_colors for carriers {missing_i} not defined in config.')
n.carriers['color'] = colors
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
@ -588,27 +574,48 @@ if __name__ == "__main__":
n = pypsa.Network(snakemake.input.base_network)
year = snakemake.wildcards.year
snapshots = dict(start=year, end=str(int(year)+1), closed="left") if year else snakemake.config['snapshots']
weather_year = snakemake.wildcards.weather_year
if weather_year:
snapshots = dict(
start=weather_year,
end=str(int(weather_year)+1),
closed="left"
)
else:
snapshots = snakemake.config['snapshots']
n.set_snapshots(pd.date_range(freq='h', **snapshots))
n.snapshot_weightings[:] *= 8760. / n.snapshot_weightings.sum()
Nyears = n.snapshot_weightings.sum() / 8760.
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(Nyears)
ppl = load_powerplants()
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
ppl = load_powerplants(snakemake.input.powerplants)
attach_load(n)
attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes,
snakemake.config['countries'], snakemake.config['load']['scaling_factor'])
update_transmission_costs(n, costs)
update_transmission_costs(n, costs, snakemake.config['lines']['length_factor'])
attach_conventional_generators(n, costs, ppl)
attach_wind_and_solar(n, costs)
attach_hydro(n, costs, ppl)
attach_extendable_generators(n, costs, ppl)
carriers = snakemake.config['electricity']['conventional_carriers']
attach_conventional_generators(n, costs, ppl, carriers)
estimate_renewable_capacities(n)
attach_OPSD_renewables(n)
carriers = snakemake.config['renewable']
attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor'])
add_nice_carrier_names(n)
if 'hydro' in snakemake.config['renewable']:
carriers = snakemake.config['renewable']['hydro'].pop('carriers', [])
attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities,
carriers, **snakemake.config['renewable']['hydro'])
carriers = snakemake.config['electricity']['extendable_carriers']['Generator']
attach_extendable_generators(n, costs, ppl, carriers)
tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {})
estimate_renewable_capacities(n, tech_map)
techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
attach_OPSD_renewables(n, techs)
update_p_nom_max(n)
add_nice_carrier_names(n, snakemake.config)
n.export_to_netcdf(snakemake.output[0])

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -37,13 +37,13 @@ Inputs
Outputs
-------
- ``networks/elec{year}_s{simpl}_{clusters}_ec.nc``:
- ``networks/elec{weather_year}_s{simpl}_{clusters}_ec.nc``:
Description
-----------
The rule :mod:`add_extra_components` attaches additional extendable components to the clustered and simplified network. These can be configured in the ``config.yaml`` at ``electricity: extendable_carriers:``. It processes ``networks/elec{year}_s{simpl}_{clusters}.nc`` to build ``networks/elec{year}_s{simpl}_{clusters}_ec.nc``, which in contrast to the former (depending on the configuration) contains components with **zero** initial capacity:
The rule :mod:`add_extra_components` attaches additional extendable components to the clustered and simplified network. These can be configured in the ``config.yaml`` at ``electricity: extendable_carriers:``. It processes ``networks/elec{weather_year}_s{simpl}_{clusters}.nc`` to build ``networks/elec{weather_year}_s{simpl}_{clusters}_ec.nc``, which in contrast to the former (depending on the configuration) contains components with **zero** initial capacity:
- ``StorageUnits`` of carrier 'H2' and/or 'battery'. If this option is chosen, every bus is given an extendable ``StorageUnit`` of the corresponding carrier. The energy and power capacities are linked through a parameter that specifies the energy capacity as maximum hours at full dispatch power and is configured in ``electricity: max_hours:``. This linkage leads to one investment variable per storage unit. The default ``max_hours`` lead to long-term hydrogen and short-term battery storage units.
@ -64,8 +64,7 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def attach_storageunits(n, costs):
elec_opts = snakemake.config['electricity']
def attach_storageunits(n, costs, elec_opts):
carriers = elec_opts['extendable_carriers']['StorageUnit']
max_hours = elec_opts['max_hours']
@ -89,8 +88,7 @@ def attach_storageunits(n, costs):
cyclic_state_of_charge=True)
def attach_stores(n, costs):
elec_opts = snakemake.config['electricity']
def attach_stores(n, costs, elec_opts):
carriers = elec_opts['extendable_carriers']['Store']
_add_missing_carriers_from_costs(n, costs, carriers)
@ -152,14 +150,11 @@ def attach_stores(n, costs):
bus1=buses_i,
carrier='battery discharger',
efficiency=costs.at['battery inverter','efficiency'],
capital_cost=costs.at['battery inverter', 'capital_cost'],
p_nom_extendable=True,
marginal_cost=costs.at["battery inverter", "marginal_cost"])
def attach_hydrogen_pipelines(n, costs):
elec_opts = snakemake.config['electricity']
def attach_hydrogen_pipelines(n, costs, elec_opts):
ext_carriers = elec_opts['extendable_carriers']
as_stores = ext_carriers.get('Store', [])
@ -199,15 +194,15 @@ if __name__ == "__main__":
configure_logging(snakemake)
n = pypsa.Network(snakemake.input.network)
Nyears = n.snapshot_weightings.sum() / 8760.
costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs,
config=snakemake.config['costs'],
elec_config=snakemake.config['electricity'])
elec_config = snakemake.config['electricity']
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], elec_config, Nyears)
attach_storageunits(n, costs)
attach_stores(n, costs)
attach_hydrogen_pipelines(n, costs)
attach_storageunits(n, costs, elec_config)
attach_stores(n, costs, elec_config)
attach_hydrogen_pipelines(n, costs, elec_config)
add_nice_carrier_names(n, config=snakemake.config)
add_nice_carrier_names(n, snakemake.config)
n.export_to_netcdf(snakemake.output[0])

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -68,11 +68,10 @@ import yaml
import pandas as pd
import geopandas as gpd
import numpy as np
import scipy as sp
import networkx as nx
from scipy import spatial
from scipy.sparse import csgraph
from six import iteritems
from itertools import product
from shapely.geometry import Point, LineString
@ -96,11 +95,11 @@ def _get_country(df):
def _find_closest_links(links, new_links, distance_upper_bound=1.5):
treecoords = np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten()
treecoords = np.asarray([np.asarray(shapely.wkt.loads(s).coords)[[0, -1]].flatten()
for s in links.geometry])
querycoords = np.vstack([new_links[['x1', 'y1', 'x2', 'y2']],
new_links[['x2', 'y2', 'x1', 'y1']]])
tree = sp.spatial.KDTree(treecoords)
tree = spatial.KDTree(treecoords)
dist, ind = tree.query(querycoords, distance_upper_bound=distance_upper_bound)
found_b = ind < len(links)
found_i = np.arange(len(new_links)*2)[found_b] % len(new_links)
@ -111,9 +110,9 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5):
.sort_index()['i']
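# Editorial sketch of the KDTree behaviour relied on above: query points
# farther away than distance_upper_bound are reported with an index equal to
# the tree size, which is why `ind < len(links)` filters out the misses.
#
#   >>> import numpy as np
#   >>> from scipy import spatial
#   >>> tree = spatial.KDTree(np.array([[0., 0.], [10., 10.]]))
#   >>> dist, ind = tree.query(np.array([[0.1, 0.1], [50., 50.]]),
#   ...                        distance_upper_bound=1.5)
#   >>> ind  # the second query point has no neighbour within 1.5
#   array([0, 2])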
def _load_buses_from_eg():
buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'",
true_values='t', false_values='f',
def _load_buses_from_eg(eg_buses, europe_shape, config_elec):
buses = (pd.read_csv(eg_buses, quotechar="'",
true_values=['t'], false_values=['f'],
dtype=dict(bus_id="str"))
.set_index("bus_id")
.drop(['station_id'], axis=1)
@ -123,19 +122,19 @@ def _load_buses_from_eg():
buses['under_construction'] = buses['under_construction'].fillna(False).astype(bool)
# remove all buses outside of all countries including exclusive economic zones (offshore)
europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry']
europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry']
europe_shape_prepped = shapely.prepared.prep(europe_shape)
buses_in_europe_b = buses[['x', 'y']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1)
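# Editorial note: shapely.prepared.prep builds an internal index on the
# polygon once, so the thousands of per-bus `contains` checks in the apply
# above run considerably faster than against the raw geometry.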
buses_with_v_nom_to_keep_b = buses.v_nom.isin(snakemake.config['electricity']['voltages']) | buses.v_nom.isnull()
logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(snakemake.config['electricity']['voltages'])))
buses_with_v_nom_to_keep_b = buses.v_nom.isin(config_elec['voltages']) | buses.v_nom.isnull()
logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(config_elec['voltages'])))
return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b])
def _load_transformers_from_eg(buses):
transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'",
true_values='t', false_values='f',
def _load_transformers_from_eg(buses, eg_transformers):
transformers = (pd.read_csv(eg_transformers, quotechar="'",
true_values=['t'], false_values=['f'],
dtype=dict(transformer_id='str', bus0='str', bus1='str'))
.set_index('transformer_id'))
@ -144,9 +143,9 @@ def _load_transformers_from_eg(buses):
return transformers
def _load_converters_from_eg(buses):
converters = (pd.read_csv(snakemake.input.eg_converters, quotechar="'",
true_values='t', false_values='f',
def _load_converters_from_eg(buses, eg_converters):
converters = (pd.read_csv(eg_converters, quotechar="'",
true_values=['t'], false_values=['f'],
dtype=dict(converter_id='str', bus0='str', bus1='str'))
.set_index('converter_id'))
@ -157,8 +156,8 @@ def _load_converters_from_eg(buses):
return converters
def _load_links_from_eg(buses):
links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values='t', false_values='f',
def _load_links_from_eg(buses, eg_links):
links = (pd.read_csv(eg_links, quotechar="'", true_values=['t'], false_values=['f'],
dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool"))
.set_index('link_id'))
@ -175,11 +174,11 @@ def _load_links_from_eg(buses):
return links
def _add_links_from_tyndp(buses, links):
links_tyndp = pd.read_csv(snakemake.input.links_tyndp)
def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape):
links_tyndp = pd.read_csv(links_tyndp)
# remove all links from list which lie outside all of the desired countries
europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry']
europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry']
europe_shape_prepped = shapely.prepared.prep(europe_shape)
x1y1_in_europe_b = links_tyndp[['x1', 'y1']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1)
x2y2_in_europe_b = links_tyndp[['x2', 'y2']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1)
@ -211,8 +210,9 @@ def _add_links_from_tyndp(buses, links):
if links_tyndp["j"].notnull().any():
logger.info("TYNDP links already in the dataset (skipping): " + ", ".join(links_tyndp.loc[links_tyndp["j"].notnull(), "Name"]))
links_tyndp = links_tyndp.loc[links_tyndp["j"].isnull()]
if links_tyndp.empty: return buses, links
tree = sp.spatial.KDTree(buses[['x', 'y']])
tree = spatial.KDTree(buses[['x', 'y']])
_, ind0 = tree.query(links_tyndp[["x1", "y1"]])
ind0_b = ind0 < len(buses)
links_tyndp.loc[ind0_b, "bus0"] = buses.index[ind0[ind0_b]]
@ -243,11 +243,13 @@ def _add_links_from_tyndp(buses, links):
links_tyndp.index = "T" + links_tyndp.index.astype(str)
return buses, links.append(links_tyndp, sort=True)
links = pd.concat([links, links_tyndp], sort=True)
return buses, links
def _load_lines_from_eg(buses):
lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values='t', false_values='f',
def _load_lines_from_eg(buses, eg_lines):
lines = (pd.read_csv(eg_lines, quotechar="'", true_values=['t'], false_values=['f'],
dtype=dict(line_id='str', bus0='str', bus1='str',
underground="bool", under_construction="bool"))
.set_index('line_id')
@ -260,19 +262,19 @@ def _load_lines_from_eg(buses):
return lines
def _apply_parameter_corrections(n):
with open(snakemake.input.parameter_corrections) as f:
def _apply_parameter_corrections(n, parameter_corrections):
with open(parameter_corrections) as f:
corrections = yaml.safe_load(f)
if corrections is None: return
for component, attrs in iteritems(corrections):
for component, attrs in corrections.items():
df = n.df(component)
oid = _get_oid(df)
if attrs is None: continue
for attr, repls in iteritems(attrs):
for i, r in iteritems(repls):
for attr, repls in attrs.items():
for i, r in repls.items():
if i == 'oid':
r = oid.map(repls["oid"]).dropna()
elif i == 'index':
@ -283,14 +285,14 @@ def _apply_parameter_corrections(n):
df.loc[inds, attr] = r[inds].astype(df[attr].dtype)
def _set_electrical_parameters_lines(lines):
v_noms = snakemake.config['electricity']['voltages']
linetypes = snakemake.config['lines']['types']
def _set_electrical_parameters_lines(lines, config):
v_noms = config['electricity']['voltages']
linetypes = config['lines']['types']
for v_nom in v_noms:
lines.loc[lines["v_nom"] == v_nom, 'type'] = linetypes[v_nom]
lines['s_max_pu'] = snakemake.config['lines']['s_max_pu']
lines['s_max_pu'] = config['lines']['s_max_pu']
return lines
@ -302,14 +304,14 @@ def _set_lines_s_nom_from_linetypes(n):
)
def _set_electrical_parameters_links(links):
def _set_electrical_parameters_links(links, config, links_p_nom):
if links.empty: return links
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
p_max_pu = config['links'].get('p_max_pu', 1.)
links['p_max_pu'] = p_max_pu
links['p_min_pu'] = -p_max_pu
links_p_nom = pd.read_csv(snakemake.input.links_p_nom)
links_p_nom = pd.read_csv(links_p_nom)
# filter links that are not in operation anymore
removed_b = links_p_nom.Remarks.str.contains('Shut down|Replaced', na=False)
@ -329,8 +331,8 @@ def _set_electrical_parameters_links(links):
return links
def _set_electrical_parameters_converters(converters):
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
def _set_electrical_parameters_converters(converters, config):
p_max_pu = config['links'].get('p_max_pu', 1.)
converters['p_max_pu'] = p_max_pu
converters['p_min_pu'] = -p_max_pu
@ -343,8 +345,8 @@ def _set_electrical_parameters_converters(converters):
return converters
def _set_electrical_parameters_transformers(transformers):
config = snakemake.config['transformers']
def _set_electrical_parameters_transformers(transformers, config):
config = config['transformers']
## Add transformer parameters
transformers["x"] = config.get('x', 0.1)
@ -371,7 +373,7 @@ def _remove_unconnected_components(network):
return network[component == component_sizes.index[0]]
def _set_countries_and_substations(n):
def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
buses = n.buses
@ -384,9 +386,9 @@ def _set_countries_and_substations(n):
index=buses.index
)
countries = snakemake.config['countries']
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
countries = config['countries']
country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry']
offshore_shapes = gpd.read_file(offshore_shapes).set_index('name')['geometry']
substation_b = buses['symbol'].str.contains('substation|converter station', case=False)
def prefer_voltage(x, which):
@ -496,19 +498,19 @@ def _replace_b2b_converter_at_country_border_by_link(n):
.format(i, b0, line, linkcntry.at[i], buscntry.at[b1]))
def _set_links_underwater_fraction(n):
def _set_links_underwater_fraction(n, offshore_shapes):
if n.links.empty: return
if not hasattr(n.links, 'geometry'):
n.links['underwater_fraction'] = 0.
else:
offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union
offshore_shape = gpd.read_file(offshore_shapes).unary_union
links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads))
n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length
def _adjust_capacities_of_under_construction_branches(n):
lines_mode = snakemake.config['lines'].get('under_construction', 'undef')
def _adjust_capacities_of_under_construction_branches(n, config):
lines_mode = config['lines'].get('under_construction', 'undef')
if lines_mode == 'zero':
n.lines.loc[n.lines.under_construction, 'num_parallel'] = 0.
n.lines.loc[n.lines.under_construction, 's_nom'] = 0.
@ -517,7 +519,7 @@ def _adjust_capacities_of_under_construction_branches(n):
elif lines_mode != 'keep':
logger.warning("Unrecognized configuration for `lines: under_construction` = `{}`. Keeping under construction lines.")
links_mode = snakemake.config['links'].get('under_construction', 'undef')
links_mode = config['links'].get('under_construction', 'undef')
if links_mode == 'zero':
n.links.loc[n.links.under_construction, "p_nom"] = 0.
elif links_mode == 'remove':
@ -532,28 +534,30 @@ def _adjust_capacities_of_under_construction_branches(n):
return n
def base_network():
buses = _load_buses_from_eg()
def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links,
links_p_nom, links_tyndp, europe_shape, country_shapes, offshore_shapes,
parameter_corrections, config):
links = _load_links_from_eg(buses)
if snakemake.config['links'].get('include_tyndp'):
buses, links = _add_links_from_tyndp(buses, links)
buses = _load_buses_from_eg(eg_buses, europe_shape, config['electricity'])
converters = _load_converters_from_eg(buses)
links = _load_links_from_eg(buses, eg_links)
if config['links'].get('include_tyndp'):
buses, links = _add_links_from_tyndp(buses, links, links_tyndp, europe_shape)
lines = _load_lines_from_eg(buses)
transformers = _load_transformers_from_eg(buses)
converters = _load_converters_from_eg(buses, eg_converters)
lines = _set_electrical_parameters_lines(lines)
transformers = _set_electrical_parameters_transformers(transformers)
links = _set_electrical_parameters_links(links)
converters = _set_electrical_parameters_converters(converters)
lines = _load_lines_from_eg(buses, eg_lines)
transformers = _load_transformers_from_eg(buses, eg_transformers)
lines = _set_electrical_parameters_lines(lines, config)
transformers = _set_electrical_parameters_transformers(transformers, config)
links = _set_electrical_parameters_links(links, config, links_p_nom)
converters = _set_electrical_parameters_converters(converters, config)
n = pypsa.Network()
n.name = 'PyPSA-Eur'
n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots']))
n.snapshot_weightings[:] *= 8760. / n.snapshot_weightings.sum()
n.set_snapshots(pd.date_range(freq='h', **config['snapshots']))
n.import_components_from_dataframe(buses, "Bus")
n.import_components_from_dataframe(lines, "Line")
@ -563,17 +567,17 @@ def base_network():
_set_lines_s_nom_from_linetypes(n)
_apply_parameter_corrections(n)
_apply_parameter_corrections(n, parameter_corrections)
n = _remove_unconnected_components(n)
_set_countries_and_substations(n)
_set_countries_and_substations(n, config, country_shapes, offshore_shapes)
_set_links_underwater_fraction(n)
_set_links_underwater_fraction(n, offshore_shapes)
_replace_b2b_converter_at_country_border_by_link(n)
n = _adjust_capacities_of_under_construction_branches(n)
n = _adjust_capacities_of_under_construction_branches(n, config)
return n
@ -583,6 +587,8 @@ if __name__ == "__main__":
snakemake = mock_snakemake('base_network')
configure_logging(snakemake)
n = base_network()
n = base_network(snakemake.input.eg_buses, snakemake.input.eg_converters, snakemake.input.eg_transformers, snakemake.input.eg_lines, snakemake.input.eg_links,
snakemake.input.links_p_nom, snakemake.input.links_tyndp, snakemake.input.europe_shape, snakemake.input.country_shapes, snakemake.input.offshore_shapes,
snakemake.input.parameter_corrections, snakemake.config)
n.export_to_netcdf(snakemake.output[0])

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Creates Voronoi shapes for each bus representing both onshore and offshore regions.

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
# SPDX-FileCopyrightText: : 2017-2021 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Create cutouts with `atlite <https://atlite.readthedocs.io/en/latest/>`_.
@ -92,10 +92,11 @@ Description
"""
import logging
import atlite
import geopandas as gpd
import pandas as pd
from _helpers import configure_logging
import os
import atlite
logger = logging.getLogger(__name__)
@ -107,14 +108,24 @@ if __name__ == "__main__":
# TODO make it accept year independent config
cutout_params = snakemake.config['atlite']['cutouts'][snakemake.wildcards.cutout]
for p in ('xs', 'ys', 'years', 'months'):
if p in cutout_params:
cutout_params[p] = slice(*cutout_params[p])
cutout = atlite.Cutout(snakemake.wildcards.cutout,
cutout_dir=os.path.dirname(snakemake.output[0]),
**cutout_params)
snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
time = [snapshots[0], snapshots[-1]]
cutout_params['time'] = slice(*cutout_params.get('time', time))
nprocesses = snakemake.config['atlite'].get('nprocesses', 4)
if {'x', 'y', 'bounds'}.isdisjoint(cutout_params):
# Determine the bounds from bus regions with a buffer of two grid cells
onshore = gpd.read_file(snakemake.input.regions_onshore)
offshore = gpd.read_file(snakemake.input.regions_offshore)
regions = onshore.append(offshore)
d = max(cutout_params.get('dx', 0.25), cutout_params.get('dy', 0.25))*2
cutout_params['bounds'] = regions.total_bounds + [-d, -d, d, d]
elif {'x', 'y'}.issubset(cutout_params):
cutout_params['x'] = slice(*cutout_params['x'])
cutout_params['y'] = slice(*cutout_params['y'])
cutout.prepare(nprocesses=nprocesses)
logging.info(f"Preparing cutout with parameters {cutout_params}.")
features = cutout_params.pop('features', None)
cutout = atlite.Cutout(snakemake.output[0], **cutout_params)
cutout.prepare(features=features)
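Worked example for the new bounds fallback above: geopandas' `total_bounds` is ordered [minx, miny, maxx, maxy], so adding [-d, -d, d, d] pads the region envelope by two grid cells on every side (values below are illustrative):
import numpy as np
total_bounds = np.array([-12.0, 33.0, 42.0, 72.0])  # [minx, miny, maxx, maxy]
d = max(0.25, 0.25) * 2                             # two cells at default dx = dy = 0.25
bounds = total_bounds + [-d, -d, d, d]              # -> [-12.5, 32.5, 42.5, 72.5]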

View File

@ -2,7 +2,7 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Build hydroelectric inflow time-series for each country.
@ -62,7 +62,6 @@ Description
import logging
from _helpers import configure_logging
import os
import atlite
import geopandas as gpd
from vresutils import hydro as vhydro
@ -75,16 +74,12 @@ if __name__ == "__main__":
snakemake = mock_snakemake('build_hydro_profile')
configure_logging(snakemake)
year = snakemake.wildcards.year
config = snakemake.config['renewable']['hydro']
cutout_dir = os.path.dirname(snakemake.input.cutout)
cutout_config = config['cutout']
if year: cutout_config = cutout_config.format(year=year)
cutout = atlite.Cutout(cutout_config, cutout_dir=cutout_dir)
config_hydro = snakemake.config['renewable']['hydro']
cutout = atlite.Cutout(snakemake.input.cutout)
countries = snakemake.config['countries']
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry'].reindex(countries)
country_shapes = (gpd.read_file(snakemake.input.country_shapes)
.set_index('name')['geometry'].reindex(countries))
country_shapes.index.name = 'countries'
eia_stats = vhydro.get_eia_annual_hydro_generation(snakemake.input.eia_hydro_generation).reindex(columns=countries)
@ -97,7 +92,7 @@ if __name__ == "__main__":
lower_threshold_quantile=True,
normalize_using_yearly=eia_stats)
if 'clip_min_inflow' in config:
inflow.values[inflow.values < config['clip_min_inflow']] = 0.
if 'clip_min_inflow' in config_hydro:
inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0)
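# Note on the rewrite above: inflow.where(cond, 0) keeps values where cond is
# True and writes 0 elsewhere, reproducing the old in-place numpy mutation in
# an xarray-native (and dask-safe) way.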
inflow.to_netcdf(snakemake.output[0])

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2020 @JanFrederickUnnewehr, The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
@ -14,7 +14,6 @@ Relevant Settings
snapshots:
load:
url:
interpolate_limit:
time_shift_for_large_gaps:
manual_adjustments:
@ -71,7 +70,7 @@ def load_timeseries(fn, years, countries, powerstatistics=True):
"""
logger.info(f"Retrieving load data from '{fn}'.")
pattern = 'power_statistics' if powerstatistics else '_transparency'
pattern = 'power_statistics' if powerstatistics else 'transparency'
pattern = f'_load_actual_entsoe_{pattern}'
rename = lambda s: s[:-len(pattern)]
date_parser = lambda x: dateutil.parser.parse(x, ignoretz=True)
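Worked example of the suffix handling fixed above: the old pattern composed '_load_actual_entsoe__transparency' with a double underscore, which does not match OPSD's column naming; with the fix the rename strips the suffix cleanly:
pattern = '_load_actual_entsoe_transparency'
rename = lambda s: s[:-len(pattern)]
print(rename('DE_load_actual_entsoe_transparency'))  # -> 'DE'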
@ -197,18 +196,16 @@ if __name__ == "__main__":
configure_logging(snakemake)
config = snakemake.config
powerstatistics = config['load']['power_statistics']
url = config['load']['url']
interpolate_limit = config['load']['interpolate_limit']
countries = config['countries']
snapshots = pd.date_range(freq='h', **config['snapshots'])
powerstatistics = snakemake.config['load']['power_statistics']
interpolate_limit = snakemake.config['load']['interpolate_limit']
countries = snakemake.config['countries']
snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
years = slice(snapshots[0], snapshots[-1])
time_shift = config['load']['time_shift_for_large_gaps']
time_shift = snakemake.config['load']['time_shift_for_large_gaps']
load = load_timeseries(url, years, countries, powerstatistics)
load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
if config['load']['manual_adjustments']:
if snakemake.config['load']['manual_adjustments']:
load = manual_adjustment(load, powerstatistics)
logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Rasterizes the vector data of the `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas onto all cutout regions.
@ -40,33 +40,53 @@ Description
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, retrieve_snakemake_keys
import atlite
import geokit as gk
from pathlib import Path
import geopandas as gpd
import rasterio as rio
from rasterio.features import geometry_mask
from rasterio.warp import transform_bounds
logger = logging.getLogger(__name__)
def determine_cutout_xXyY(cutout_name):
cutout = atlite.Cutout(cutout_name, cutout_dir=cutout_dir)
cutout = atlite.Cutout(cutout_name)
assert cutout.crs.to_epsg() == 4326
x, X, y, Y = cutout.extent
dx = (X - x) / (cutout.shape[1] - 1)
dy = (Y - y) / (cutout.shape[0] - 1)
dx, dy = cutout.dx, cutout.dy
return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.]
def get_transform_and_shape(bounds, res):
left, bottom = [(b // res)* res for b in bounds[:2]]
right, top = [(b // res + 1) * res for b in bounds[2:]]
shape = int((top - bottom) // res), int((right - left) / res)
transform = rio.Affine(res, 0, left, 0, -res, top)
return transform, shape
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake('build_natura_raster')
configure_logging(snakemake)
cutout_dir = Path(snakemake.input.cutouts[0]).parent.resolve()
cutout_names = {res['cutout'] for res in snakemake.config['renewable'].values()}
xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutout_names))
xXyY = min(xs), max(Xs), min(ys), max(Ys)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
cutouts = paths.cutouts
xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys))
transform, out_shape = get_transform_and_shape(bounds, res=100)
# adjusted boundaries
shapes = gpd.read_file(paths.natura).to_crs(3035)
raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
raster = raster.astype(rio.uint8)
with rio.open(out[0], 'w', driver='GTiff', dtype=rio.uint8,
count=1, transform=transform, crs=3035, compress='lzw',
width=raster.shape[1], height=raster.shape[0]) as dst:
dst.write(raster, indexes=1)
natura = gk.vector.loadVector(snakemake.input.natura)
extent = gk.Extent.from_xXyY(xXyY).castTo(3035).fit(100)
extent.rasterize(natura, pixelWidth=100, pixelHeight=100, output=snakemake.output[0])
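Worked example for get_transform_and_shape above, snapping the EPSG:3035 bounds outward to a 100 m grid (illustrative coordinates):
res = 100
left, bottom = [(b // res) * res for b in (900010., 900020.)]    # 900000, 900000
right, top = [(b // res + 1) * res for b in (900990., 901980.)]  # 901000, 902000
shape = int((top - bottom) // res), int((right - left) / res)    # (20, 10) = (rows, cols)
# rio.Affine(res, 0, left, 0, -res, top): 100 m pixels, rows counting downward from `top`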

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -84,15 +84,14 @@ from scipy.spatial import cKDTree as KDTree
logger = logging.getLogger(__name__)
def add_custom_powerplants(ppl):
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False):
if not custom_ppl_query:
return ppl
add_ppls = pd.read_csv(snakemake.input.custom_powerplants, index_col=0,
add_ppls = pd.read_csv(custom_powerplants, index_col=0,
dtype={'bus': 'str'})
if isinstance(custom_ppl_query, str):
add_ppls.query(custom_ppl_query, inplace=True)
return ppl.append(add_ppls, sort=False, ignore_index=True, verify_integrity=True)
return pd.concat([ppl, add_ppls], sort=False, ignore_index=True, verify_integrity=True)
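# Usage sketch for the refactored helper (values illustrative, not repo config):
# custom_ppl_query may be True (append everything) or a pandas query string,
# evaluated against the columns of custom_powerplants.csv, e.g.
#   ppl = add_custom_powerplants(ppl, 'data/custom_powerplants.csv',
#                                'Country == "Germany" and Fueltype == "Waste"')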
if __name__ == "__main__":
@ -119,7 +118,9 @@ if __name__ == "__main__":
if isinstance(ppl_query, str):
ppl.query(ppl_query, inplace=True)
ppl = add_custom_powerplants(ppl) # add carriers from own powerplant files
# add carriers from own powerplant files:
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query)
cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]

View File

@ -2,7 +2,7 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""Calculates for each network node the
(i) installable capacity (based on land-use), (ii) the available generation time
@ -60,7 +60,6 @@ Inputs
**Source:** `GEBCO <https://www.gebco.net/data_and_products/images/gebco_2019_grid_image.jpg>`_
- ``resources/natura.tiff``: confer :ref:`natura`
- ``resources/country_shapes.geojson``: confer :ref:`shapes`
- ``resources/offshore_shapes.geojson``: confer :ref:`shapes`
- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions`
- ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions`
@ -180,224 +179,158 @@ node (`p_nom_max`): ``simple`` and ``conservative``:
reached.
"""
import progressbar as pgb
import geopandas as gpd
import xarray as xr
import numpy as np
import functools
import atlite
import logging
from pypsa.geo import haversine
from shapely.geometry import LineString
import time
from _helpers import configure_logging
import os
import atlite
import numpy as np
import xarray as xr
import pandas as pd
import multiprocessing as mp
import matplotlib.pyplot as plt
import progressbar as pgb
from scipy.sparse import csr_matrix, vstack
from pypsa.geo import haversine
from vresutils import landuse as vlanduse
from vresutils.array import spdiag
logger = logging.getLogger(__name__)
bounds = dx = dy = config = paths = gebco = clc = natura = None
def init_globals(bounds_xXyY, n_dx, n_dy, n_config, n_paths):
# Late import so that the GDAL Context is only created in the new processes
global gl, gk, gdal
import glaes as gl
import geokit as gk
from osgeo import gdal as gdal
# global in each process of the multiprocessing.Pool
global bounds, dx, dy, config, paths, gebco, clc, natura
bounds = gk.Extent.from_xXyY(bounds_xXyY)
dx = n_dx
dy = n_dy
config = n_config
paths = n_paths
if "max_depth" in config:
gebco = gk.raster.loadRaster(paths["gebco"])
gebco.SetProjection(gk.srs.loadSRS(4326).ExportToWkt())
clc = gk.raster.loadRaster(paths["corine"])
clc.SetProjection(gk.srs.loadSRS(3035).ExportToWkt())
natura = gk.raster.loadRaster(paths["natura"])
def downsample_to_coarse_grid(bounds, dx, dy, mask, data):
# The GDAL warp function with the 'average' resample algorithm needs a band of zero values of at least
# the size of one coarse cell around the original raster or it produces erroneous results
orig = mask.createRaster(data=data)
padded_extent = mask.extent.castTo(bounds.srs).pad(max(dx, dy)).castTo(mask.srs)
padded = padded_extent.fit((mask.pixelWidth, mask.pixelHeight)).warp(orig, mask.pixelWidth, mask.pixelHeight)
orig = None # free original raster
average = bounds.createRaster(dx, dy, dtype=gdal.GDT_Float32)
assert gdal.Warp(average, padded, resampleAlg='average') == 1, "gdal warp failed: %s" % gdal.GetLastErrorMsg()
return average
def calculate_potential(gid, save_map=None):
feature = gk.vector.extractFeature(paths["regions"], where=gid)
ec = gl.ExclusionCalculator(feature.geom)
corine = config.get("corine", {})
if isinstance(corine, list):
corine = {'grid_codes': corine}
if "grid_codes" in corine:
ec.excludeRasterType(clc, value=corine["grid_codes"], invert=True)
if corine.get("distance", 0.) > 0.:
ec.excludeRasterType(clc, value=corine["distance_grid_codes"], buffer=corine["distance"])
if config.get("natura", False):
ec.excludeRasterType(natura, value=1)
if "max_depth" in config:
ec.excludeRasterType(gebco, (None, -config["max_depth"]))
# TODO compute a distance field as a raster beforehand
if 'max_shore_distance' in config:
ec.excludeVectorType(paths["country_shapes"], buffer=config['max_shore_distance'], invert=True)
if 'min_shore_distance' in config:
ec.excludeVectorType(paths["country_shapes"], buffer=config['min_shore_distance'])
if save_map is not None:
ec.draw()
plt.savefig(save_map, transparent=True)
plt.close()
availability = downsample_to_coarse_grid(bounds, dx, dy, ec.region, np.where(ec.region.mask, ec._availability, 0))
return csr_matrix(gk.raster.extractMatrix(availability).flatten() / 100.)
if __name__ == '__main__':
if 'snakemake' not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake('build_renewable_profiles', technology='solar')
configure_logging(snakemake)
pgb.streams.wrap_stderr()
year = snakemake.wildcards.year
nprocesses = snakemake.config['atlite'].get('nprocesses')
noprogress = not snakemake.config['atlite'].get('show_progress', True)
config = snakemake.config['renewable'][snakemake.wildcards.technology]
cutout_name = config['cutout']
if year: cutout_name = cutout_name.format(year=year)
snapshots = dict(start=year, end=str(int(year)+1), closed="left") if year else snakemake.config['snapshots']
time = pd.date_range(freq='m', **snapshots)
params = dict(years=slice(*time.year[[0, -1]]), months=slice(*time.month[[0, -1]]))
cutout = atlite.Cutout(cutout_name,
cutout_dir=os.path.dirname(snakemake.input.cutout),
**params)
minx, maxx, miny, maxy = cutout.extent
dx = (maxx - minx) / (cutout.shape[1] - 1)
dy = (maxy - miny) / (cutout.shape[0] - 1)
bounds_xXyY = (minx - dx/2., maxx + dx/2., miny - dy/2., maxy + dy/2.)
# Use GLAES to compute available potentials and the transition matrix
paths = dict(snakemake.input)
# Use the following for testing the default windows method on linux
# mp.set_start_method('spawn')
with mp.Pool(initializer=init_globals, initargs=(bounds_xXyY, dx, dy, config, paths),
maxtasksperchild=20, processes=snakemake.config['atlite'].get('nprocesses', 2)) as pool:
# The GDAL library creates a GDAL context on module import, which may not be shared over multiple
# processes or the PROJ4 library has a hiccup, so we import only after forking.
import geokit as gk
regions = gk.vector.extractFeatures(paths["regions"], onlyAttr=True)
buses = pd.Index(regions['name'], name="bus")
widgets = [
pgb.widgets.Percentage(),
' ', pgb.widgets.SimpleProgress(format='(%s)' % pgb.widgets.SimpleProgress.DEFAULT_FORMAT),
' ', pgb.widgets.Bar(),
' ', pgb.widgets.Timer(),
' ', pgb.widgets.ETA()
]
progressbar = pgb.ProgressBar(prefix='Compute GIS potentials: ', widgets=widgets, max_value=len(regions))
matrix = vstack(list(progressbar(pool.imap(calculate_potential, regions.index))))
potentials = config['capacity_per_sqkm'] * vlanduse._cutout_cell_areas(cutout)
potmatrix = matrix * spdiag(potentials.ravel())
if not config.get('keep_all_available_areas', False):
potmatrix.data[potmatrix.data < 1.] = 0 # ignore weather cells where less than 1 MW can be installed
potmatrix.eliminate_zeros()
resource = config['resource']
func = getattr(cutout, resource.pop('method'))
resource = config['resource'] # pv panel config / wind turbine config
correction_factor = config.get('correction_factor', 1.)
if correction_factor != 1.:
logger.warning('correction_factor is set as {}'.format(correction_factor))
capacity_factor = correction_factor * func(capacity_factor=True, show_progress='Compute capacity factors: ', **resource).stack(spatial=('y', 'x')).values
layoutmatrix = potmatrix * spdiag(capacity_factor)
profile, capacities = func(matrix=layoutmatrix, index=buses, per_unit=True,
return_capacity=True, show_progress='Compute profiles: ',
**resource)
capacity_per_sqkm = config['capacity_per_sqkm']
p_nom_max_meth = config.get('potential', 'conservative')
if p_nom_max_meth == 'simple':
p_nom_max = xr.DataArray(np.asarray(potmatrix.sum(axis=1)).squeeze(), [buses])
elif p_nom_max_meth == 'conservative':
# p_nom_max has to be calculated for each bus and is the minimal ratio
# (min over all weather grid cells of the bus region) between the available
# potential (potmatrix) and the used normalised layout (layoutmatrix /
# capacities), so we would like to calculate potmatrix / (layoutmatrix /
# capacities). Since layoutmatrix = potmatrix * capacity_factor, this
# corresponds to capacities/max(capacity factor in the voronoi cell)
p_nom_max = xr.DataArray([1./np.max(capacity_factor[inds]) if len(inds) else 0.
for inds in np.split(potmatrix.indices, potmatrix.indptr[1:-1])], [buses]) * capacities
if isinstance(config.get("corine", {}), list):
config['corine'] = {'grid_codes': config['corine']}
if correction_factor != 1.:
logger.info(f'correction_factor is set as {correction_factor}')
cutout = atlite.Cutout(snakemake.input['cutout'])
regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
buses = regions.index
excluder = atlite.ExclusionContainer(crs=3035, res=100)
if config['natura']:
excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)
corine = config.get("corine", {})
if "grid_codes" in corine:
codes = corine["grid_codes"]
excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
if corine.get("distance", 0.) > 0.:
codes = corine["distance_grid_codes"]
buffer = corine["distance"]
excluder.add_raster(snakemake.input.corine, codes=codes, buffer=buffer, crs=3035)
if "max_depth" in config:
# lambda not supported for atlite + multiprocessing
# use named function np.greater with partially frozen argument instead
# and exclude areas where: -max_depth > grid cell depth
func = functools.partial(np.greater,-config['max_depth'])
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)
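# Argument-order note (illustration, not part of the change): partial freezes
# the *first* argument of np.greater, so with max_depth = 50:
#   func = functools.partial(np.greater, -50.)
#   func(-60.)   # np.greater(-50., -60.) -> True  -> 60 m deep, excluded
#   func(-40.)   # np.greater(-50., -40.) -> False -> 40 m deep, kept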
if 'min_shore_distance' in config:
buffer = config['min_shore_distance']
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)
if 'max_shore_distance' in config:
buffer = config['max_shore_distance']
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True)
kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
if noprogress:
logger.info('Calculate landuse availabilities...')
start = time.time()
availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
duration = time.time() - start
logger.info(f'Completed availability calculation ({duration:2.2f}s)')
else:
raise AssertionError('Config key `potential` should be one of "simple" (default) or "conservative",'
' not "{}"'.format(p_nom_max_meth))
availability = cutout.availabilitymatrix(regions, excluder, **kwargs)
layout = xr.DataArray(np.asarray(potmatrix.sum(axis=0)).reshape(cutout.shape),
[cutout.meta.indexes[ax] for ax in ['y', 'x']])
area = cutout.grid.to_crs(3035).area / 1e6
area = xr.DataArray(area.values.reshape(cutout.shape),
[cutout.coords['y'], cutout.coords['x']])
# Determine weighted average distance from substation
cell_coords = cutout.grid_coordinates()
potential = capacity_per_sqkm * availability.sum('bus') * area
func = getattr(cutout, resource.pop('method'))
resource['dask_kwargs'] = {'num_workers': nprocesses}
capacity_factor = correction_factor * func(capacity_factor=True, **resource)
layout = capacity_factor * area * capacity_per_sqkm
profile, capacities = func(matrix=availability.stack(spatial=['y','x']),
layout=layout, index=buses,
per_unit=True, return_capacity=True, **resource)
logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')")
if p_nom_max_meth == 'simple':
p_nom_max = capacity_per_sqkm * availability @ area
elif p_nom_max_meth == 'conservative':
max_cap_factor = capacity_factor.where(availability!=0).max(['x', 'y'])
p_nom_max = capacities / max_cap_factor
else:
raise AssertionError('Config key `potential` should be one of "simple" '
f'(default) or "conservative", not "{p_nom_max_meth}"')
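# Worked example for the 'conservative' method (illustrative numbers): for a
# bus with capacities = 100 MW whose best in-region weather cell reaches a
# capacity factor of 0.4, p_nom_max = 100 / 0.4 = 250 MW. As the removed GLAES
# comment above derives, this equals the minimal ratio between available
# potential and the normalised layout over the cells of the bus region.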
logger.info('Calculate average distances.')
layoutmatrix = (layout * availability).stack(spatial=['y','x'])
coords = cutout.grid[['x', 'y']]
bus_coords = regions[['x', 'y']]
average_distance = []
for i in regions.index:
row = layoutmatrix[i]
distances = haversine(regions.loc[i, ['x', 'y']], cell_coords[row.indices])[0]
average_distance.append((distances * (row.data / row.data.sum())).sum())
centre_of_mass = []
for bus in buses:
row = layoutmatrix.sel(bus=bus).data
nz_b = row != 0
row = row[nz_b]
co = coords[nz_b]
distances = haversine(bus_coords.loc[bus], co)
average_distance.append((distances * (row / row.sum())).sum())
centre_of_mass.append(co.values.T @ (row / row.sum()))
average_distance = xr.DataArray(average_distance, [buses])
centre_of_mass = xr.DataArray(centre_of_mass, [buses, ('spatial', ['x', 'y'])])
ds = xr.merge([(correction_factor * profile).rename('profile'),
capacities.rename('weight'),
p_nom_max.rename('p_nom_max'),
layout.rename('potential'),
average_distance.rename('average_distance')])
capacities.rename('weight'),
p_nom_max.rename('p_nom_max'),
potential.rename('potential'),
average_distance.rename('average_distance')])
if snakemake.wildcards.technology.startswith("offwind"):
import geopandas as gpd
from shapely.geometry import LineString
offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union
logger.info('Calculate underwater fraction of connections.')
offshore_shape = gpd.read_file(snakemake.input['offshore_shapes']).unary_union
underwater_fraction = []
for i in regions.index:
row = layoutmatrix[i]
centre_of_mass = (cell_coords[row.indices] * (row.data / row.data.sum())[:,np.newaxis]).sum(axis=0)
line = LineString([centre_of_mass, regions.loc[i, ['x', 'y']]])
underwater_fraction.append(line.intersection(offshore_shape).length / line.length)
for bus in buses:
p = centre_of_mass.sel(bus=bus).data
line = LineString([p, regions.loc[bus, ['x', 'y']]])
frac = line.intersection(offshore_shape).length/line.length
underwater_fraction.append(frac)
ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses])
# select only buses with some capacity and minimal capacity factor
ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) &
(ds['p_nom_max'] > config.get('min_p_nom_max', 0.))))
(ds['p_nom_max'] > config.get('min_p_nom_max', 0.))))
if 'clip_p_max_pu' in config:
ds['profile'].values[ds['profile'].values < config['clip_p_max_pu']] = 0.
min_p_max_pu = config['clip_p_max_pu']
ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0)
ds.to_netcdf(snakemake.output.profile)
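Editor's sketch of the land-eligibility workflow that replaces the GLAES/multiprocessing machinery above, reduced to its core calls (file paths are placeholders; the API calls mirror those used in this diff):
import atlite
import geopandas as gpd
cutout = atlite.Cutout("cutout.nc")                          # placeholder file
regions = gpd.read_file("regions.geojson").set_index("name")
excluder = atlite.ExclusionContainer(crs=3035, res=100)
excluder.add_raster("natura.tiff", nodata=0)                 # protected areas -> excluded
availability = cutout.availabilitymatrix(regions, excluder)  # DataArray (bus, y, x)
# each entry is the eligible share of a weather cell within the bus region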

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Creates GIS shape files of the countries, exclusive economic zones and `NUTS3 <https://en.wikipedia.org/wiki/Nomenclature_of_Territorial_Units_for_Statistics>`_ areas.
@ -34,8 +34,8 @@ Inputs
.. image:: ../img/nuts3.png
:scale: 33 %
- ``data/bundle/nama_10r_3popgdp.tsv.gz``: Average annual population by NUTS3 region (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3popgdp&lang=en>`_)
- ``data/bundle/nama_10r_3gdp.tsv.gz``: Gross domestic product (GDP) by NUTS 3 regions (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3gdp&lang=en>`_)
- ``data/bundle/nama_10r_3popgdp.tsv.gz``: Average annual population by NUTS3 region (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3popgdp&lang=en>`__)
- ``data/bundle/nama_10r_3gdp.tsv.gz``: Gross domestic product (GDP) by NUTS 3 regions (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3gdp&lang=en>`__)
- ``data/bundle/ch_cantons.csv``: Mapping between Swiss Cantons and NUTS3 regions
- ``data/bundle/je-e-21.03.02.xls``: Population and GDP data per Canton (`BFS - Swiss Federal Statistical Office <https://www.bfs.admin.ch/bfs/en/home/news/whats-new.assetdetail.7786557.html>`_ )
@ -73,13 +73,13 @@ from _helpers import configure_logging
import os
import numpy as np
from operator import attrgetter
from six.moves import reduce
from functools import reduce
from itertools import takewhile
import pandas as pd
import geopandas as gpd
from shapely.geometry import MultiPolygon, Polygon
from shapely.ops import cascaded_union
from shapely.ops import unary_union
import pycountry as pyc
logger = logging.getLogger(__name__)
@ -95,7 +95,7 @@ def _get_country(target, **keys):
def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
if isinstance(polys, MultiPolygon):
polys = sorted(polys, key=attrgetter('area'), reverse=True)
polys = sorted(polys.geoms, key=attrgetter('area'), reverse=True)
mainpoly = polys[0]
mainlength = np.sqrt(mainpoly.area/(2.*np.pi))
if mainpoly.area > minarea:
@ -107,26 +107,25 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
return polys.simplify(tolerance=tolerance)
def countries():
cntries = snakemake.config['countries']
if 'RS' in cntries: cntries.append('KV')
def countries(naturalearth, country_list):
if 'RS' in country_list: country_list.append('KV')
df = gpd.read_file(snakemake.input.naturalearth)
df = gpd.read_file(naturalearth)
# Names are a hassle in naturalearth, try several fields
fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3'))
df['name'] = reduce(lambda x,y: x.fillna(y), fieldnames, next(fieldnames)).str[0:2]
df = df.loc[df.name.isin(cntries) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))]
df = df.loc[df.name.isin(country_list) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))]
s = df.set_index('name')['geometry'].map(_simplify_polys)
if 'RS' in cntries: s['RS'] = s['RS'].union(s.pop('KV'))
if 'RS' in country_list: s['RS'] = s['RS'].union(s.pop('KV'))
return s
def eez(country_shapes):
df = gpd.read_file(snakemake.input.eez)
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])]
def eez(country_shapes, eez, country_list):
df = gpd.read_file(eez)
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in country_list])]
df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c))
s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False))
s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3})
@ -139,39 +138,41 @@ def country_cover(country_shapes, eez_shapes=None):
if eez_shapes is not None:
shapes += list(eez_shapes)
europe_shape = cascaded_union(shapes)
europe_shape = unary_union(shapes)
if isinstance(europe_shape, MultiPolygon):
europe_shape = max(europe_shape, key=attrgetter('area'))
return Polygon(shell=europe_shape.exterior)
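Illustrative shapely sketch of the country_cover logic above, now based on unary_union (cascaded_union is deprecated); two overlapping squares merge into one shape whose exterior ring drops any interior holes:
from shapely.geometry import Polygon
from shapely.ops import unary_union
a = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
b = Polygon([(1, 1), (3, 1), (3, 3), (1, 3)])
merged = unary_union([a, b])            # single polygon covering both squares
cover = Polygon(shell=merged.exterior)  # keep the outline, fill holes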
def nuts3(country_shapes):
df = gpd.read_file(snakemake.input.nuts3)
def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
df = gpd.read_file(nuts3)
df = df.loc[df['STAT_LEVL_'] == 3]
df['geometry'] = df['geometry'].map(_simplify_polys)
df = df.rename(columns={'NUTS_ID': 'id'})[['id', 'geometry']].set_index('id')
pop = pd.read_table(snakemake.input.nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python')
pop = pd.read_table(nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python')
pop = (pop
.set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS']
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
.fillna(method='bfill', axis=1))['2014']
gdp = pd.read_table(snakemake.input.nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python')
gdp = pd.read_table(nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python')
gdp = (gdp
.set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB']
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
.fillna(method='bfill', axis=1))['2014']
cantons = pd.read_csv(snakemake.input.ch_cantons)
cantons = pd.read_csv(ch_cantons)
cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS']
cantons = cantons.str.pad(5, side='right', fillchar='0')
swiss = pd.read_excel(snakemake.input.ch_popgdp, skiprows=3, index_col=0)
swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0)
swiss.columns = swiss.columns.to_series().map(cantons)
pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':]))
gdp = gdp.append(pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':]))
swiss_pop = pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])
pop = pd.concat([pop, swiss_pop])
swiss_gdp = pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':])
gdp = pd.concat([gdp, swiss_gdp])
df = df.join(pd.DataFrame(dict(pop=pop, gdp=gdp)))
@ -195,7 +196,7 @@ def nuts3(country_shapes):
manual['geometry'] = manual['country'].map(country_shapes)
manual = manual.dropna()
df = df.append(manual, sort=False)
df = pd.concat([df, manual], sort=False)
df.loc['ME000', 'pop'] = 650.
@ -218,16 +219,16 @@ if __name__ == "__main__":
snakemake = mock_snakemake('build_shapes')
configure_logging(snakemake)
out = snakemake.output
country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
save_to_geojson(country_shapes, snakemake.output.country_shapes)
country_shapes = countries()
save_to_geojson(country_shapes, out.country_shapes)
offshore_shapes = eez(country_shapes)
save_to_geojson(offshore_shapes, out.offshore_shapes)
offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries'])
save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
europe_shape = country_cover(country_shapes, offshore_shapes)
save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape)
save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
nuts3_shapes = nuts3(country_shapes)
save_to_geojson(nuts3_shapes, out.nuts3_shapes)
nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp)
save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes)

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -31,28 +31,28 @@ Relevant Settings
Inputs
------
- ``resources/regions_onshore_elec{year}_s{simpl}.geojson``: confer :ref:`simplify`
- ``resources/regions_offshore_elec{year}_s{simpl}.geojson``: confer :ref:`simplify`
- ``networks/elec{year}_s{simpl}.nc``: confer :ref:`simplify`
- ``resources/busmap_elec{year}_s{simpl}.csv``: confer :ref:`simplify`
- ``data/custom_busmap_elec{year}_s{simpl}_{clusters}.csv``: optional input
- ``resources/regions_onshore_elec{weather_year}_s{simpl}.geojson``: confer :ref:`simplify`
- ``resources/regions_offshore_elec{weather_year}_s{simpl}.geojson``: confer :ref:`simplify`
- ``networks/elec{weather_year}_s{simpl}.nc``: confer :ref:`simplify`
- ``resources/busmap_elec{weather_year}_s{simpl}.csv``: confer :ref:`simplify`
- ``data/custom_busmap_elec{weather_year}_s{simpl}_{clusters}.csv``: optional input
Outputs
-------
- ``resources/regions_onshore_elec{year}_s{simpl}_{clusters}.geojson``:
- ``resources/regions_onshore_elec{weather_year}_s{simpl}_{clusters}.geojson``:
.. image:: ../img/regions_onshore_elec_s_X.png
:scale: 33 %
- ``resources/regions_offshore_elec{year}_s{simpl}_{clusters}.geojson``:
- ``resources/regions_offshore_elec{weather_year}_s{simpl}_{clusters}.geojson``:
.. image:: ../img/regions_offshore_elec_s_X.png
:scale: 33 %
- ``resources/busmap_elec{year}_s{simpl}_{clusters}.csv``: Mapping of buses from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
- ``resources/linemap{year}_elec_s{simpl}_{clusters}.csv``: Mapping of lines from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
- ``networks/elec{year}_s{simpl}_{clusters}.nc``:
- ``resources/busmap_elec{weather_year}_s{simpl}_{clusters}.csv``: Mapping of buses from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
- ``resources/linemap{weather_year}_elec_s{simpl}_{clusters}.csv``: Mapping of lines from ``networks/elec_s{simpl}.nc`` to ``networks/elec_s{simpl}_{clusters}.nc``;
- ``networks/elec{weather_year}_s{simpl}_{clusters}.nc``:
.. image:: ../img/elec_s_X.png
:scale: 40 %
@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, update_p_nom_max
import pypsa
import os
@ -135,11 +135,14 @@ import pyomo.environ as po
import matplotlib.pyplot as plt
import seaborn as sns
from six.moves import reduce
from functools import reduce
from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
_make_consense, get_clustering_from_busmap)
import warnings
warnings.filterwarnings(action='ignore', category=UserWarning)
from add_electricity import load_costs
idx = pd.IndexSlice
@ -170,12 +173,9 @@ def weighting_for_country(n, x):
return (w * (100. / w.max())).clip(lower=1.).astype(int)
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
"""Determine the number of clusters per country"""
if solver_name is None:
solver_name = snakemake.config['solving']['solver']['name']
L = (n.loads_t.p_set.mean()
.groupby(n.loads.bus).sum()
.groupby([n.buses.country, n.buses.sub_network]).sum()
@ -218,7 +218,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
results = opt.solve(m)
assert results['Solver'][0]['Status'] == 'ok', f"Solver returned non-optimally: {results}"
return pd.Series(m.n.get_values(), index=L.index).astype(int)
return pd.Series(m.n.get_values(), index=L.index).round().astype(int)
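# Why .round() was added (illustrative): integer pyomo variables can come back
# as e.g. 9.999999999; .astype(int) truncates that to 9, whereas
# .round().astype(int) recovers the intended 10.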
def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", **algorithm_kwds):
@ -226,6 +226,7 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
algorithm_kwds.setdefault('n_init', 1000)
algorithm_kwds.setdefault('max_iter', 30000)
algorithm_kwds.setdefault('tol', 1e-6)
algorithm_kwds.setdefault('random_state', 0)
n.determine_network_topology()
@ -268,12 +269,10 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
else:
raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'")
if custom_busmap:
busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
busmap.index = busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
else:
if not custom_busmap:
busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm)
else:
busmap = custom_busmap
clustering = get_clustering_from_busmap(
n, busmap,
@ -282,7 +281,7 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
aggregate_generators_carriers=aggregate_carriers,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=line_length_factor,
generator_strategies={'p_nom_max': p_nom_max_strategy},
generator_strategies={'p_nom_max': p_nom_max_strategy, 'p_nom_min': np.sum},
scale_link_capital_costs=False)
if not n.links.empty:
@ -306,14 +305,12 @@ def save_to_geojson(s, fn):
def cluster_regions(busmaps, input=None, output=None):
if input is None: input = snakemake.input
if output is None: output = snakemake.output
busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
for which in ('regions_onshore', 'regions_offshore'):
regions = gpd.read_file(getattr(input, which)).set_index('name')
geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union)
geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
regions_c.index.name = 'name'
save_to_geojson(regions_c, getattr(output, which))
@ -357,10 +354,9 @@ if __name__ == "__main__":
clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O'))
else:
line_length_factor = snakemake.config['lines']['length_factor']
hvac_overhead_cost = (load_costs(n.snapshot_weightings.sum()/8760,
tech_costs=snakemake.input.tech_costs,
config=snakemake.config['costs'],
elec_config=snakemake.config['electricity'])
Nyears = n.snapshot_weightings.objective.sum()/8760
hvac_overhead_cost = (load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
.at['HVAC overhead', 'capital_cost'])
def consense(x):
@ -372,15 +368,20 @@ if __name__ == "__main__":
potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
for tech in renewable_carriers]))
custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
line_length_factor=line_length_factor,
potential_mode=potential_mode,
solver_name=snakemake.config['solving']['solver']['name'],
extended_link_costs=hvac_overhead_cost,
focus_weights=focus_weights)
if custom_busmap:
custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
custom_busmap.index = custom_busmap.index.astype(str)
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
line_length_factor, potential_mode,
snakemake.config['solving']['solver']['name'],
"kmeans", hvac_overhead_cost, focus_weights)
update_p_nom_max(n)
clustering.network.export_to_netcdf(snakemake.output.network)
for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
getattr(clustering, attr).to_csv(snakemake.output[attr])
cluster_regions((clustering.busmap,))
cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Creates summaries of aggregated energy and costs as ``.csv`` files.
@ -54,13 +54,12 @@ Replacing '/summaries/' with '/plots/' creates nice colored maps of the results.
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, retrieve_snakemake_keys
import os
import pypsa
import pandas as pd
from six import iteritems
from add_electricity import load_costs, update_transmission_costs
idx = pd.IndexSlice
@ -112,15 +111,15 @@ def calculate_costs(n, label, costs):
costs.loc[idx[raw_index],label] = capital_costs_grouped.values
if c.name == "Link":
p = c.pnl.p0.multiply(n.snapshot_weightings,axis=0).sum()
p = c.pnl.p0.multiply(n.snapshot_weightings.generators,axis=0).sum()
elif c.name == "Line":
continue
elif c.name == "StorageUnit":
p_all = c.pnl.p.multiply(n.snapshot_weightings,axis=0)
p_all = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0)
p_all[p_all < 0.] = 0.
p = p_all.sum()
else:
p = c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum()
p = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0).sum()
marginal_costs = p*c.df.marginal_cost
@ -145,10 +144,12 @@ def calculate_energy(n, label, energy):
for c in n.iterate_components(n.one_port_components|n.branch_components):
if c.name in n.one_port_components:
c_energies = c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum()
if c.name in {'Generator', 'Load', 'ShuntImpedance'}:
c_energies = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum()
elif c.name in {'StorageUnit', 'Store'}:
c_energies = c.pnl.p.multiply(n.snapshot_weightings.stores,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum()
else:
c_energies = (-c.pnl.p1.multiply(n.snapshot_weightings,axis=0).sum() - c.pnl.p0.multiply(n.snapshot_weightings,axis=0).sum()).groupby(c.df.carrier).sum()
c_energies = (-c.pnl.p1.multiply(n.snapshot_weightings.generators,axis=0).sum() - c.pnl.p0.multiply(n.snapshot_weightings.generators,axis=0).sum()).groupby(c.df.carrier).sum()
energy = include_in_summary(energy, [c.list_name], label, c_energies)
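# Background for the generators/stores split above (PyPSA >= 0.18):
# n.snapshot_weightings is now a DataFrame with 'objective', 'generators' and
# 'stores' columns rather than a single Series; after 3-hourly averaging, for
# instance, every column holds 3.0. Power flows are weighted with the
# 'generators' column, state of charge with 'stores'.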
@ -377,7 +378,7 @@ outputs = ["costs",
]
def make_summaries(networks_dict, country='all'):
def make_summaries(networks_dict, paths, config, country='all'):
columns = pd.MultiIndex.from_tuples(networks_dict.keys(),names=["simpl","clusters","ll","opts"])
@ -386,7 +387,7 @@ def make_summaries(networks_dict, country='all'):
for output in outputs:
dfs[output] = pd.DataFrame(columns=columns,dtype=float)
for label, filename in iteritems(networks_dict):
for label, filename in networks_dict.items():
print(label, filename)
if not os.path.exists(filename):
print("does not exist!!")
@ -401,9 +402,8 @@ def make_summaries(networks_dict, country='all'):
if country != 'all':
n = n[n.buses.country == country]
Nyears = n.snapshot_weightings.sum() / 8760.
costs = load_costs(Nyears, snakemake.input[0],
snakemake.config['costs'], snakemake.config['electricity'])
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(paths[0], config['costs'], config['electricity'], Nyears)
update_transmission_costs(n, costs, simple_hvdc_costs=False)
assign_carriers(n)
@ -414,10 +414,9 @@ def make_summaries(networks_dict, country='all'):
return dfs
def to_csv(dfs):
dir = snakemake.output[0]
def to_csv(dfs, dir):
os.makedirs(dir, exist_ok=True)
for key, df in iteritems(dfs):
for key, df in dfs.items():
df.to_csv(os.path.join(dir, f"{key}.csv"))
@ -431,25 +430,27 @@ if __name__ == "__main__":
network_dir = os.path.join('results', 'networks')
configure_logging(snakemake)
def expand_from_wildcard(key):
w = getattr(snakemake.wildcards, key)
return snakemake.config["scenario"][key] if w == "all" else [w]
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
if snakemake.wildcards.ll.endswith("all"):
ll = snakemake.config["scenario"]["ll"]
if len(snakemake.wildcards.ll) == 4:
ll = [l for l in ll if l[0] == snakemake.wildcards.ll[0]]
def expand_from_wildcard(key, config):
w = getattr(wildcards, key)
return config["scenario"][key] if w == "all" else [w]
if wildcards.ll.endswith("all"):
ll = config["scenario"]["ll"]
if len(wildcards.ll) == 4:
ll = [l for l in ll if l[0] == wildcards.ll[0]]
else:
ll = [snakemake.wildcards.ll]
ll = [wildcards.ll]
networks_dict = {(simpl,clusters,l,opts) :
os.path.join(network_dir, f'{snakemake.wildcards.network}_s{simpl}_'
os.path.join(network_dir, f'elec_s{simpl}_'
f'{clusters}_ec_l{l}_{opts}.nc')
for simpl in expand_from_wildcard("simpl")
for clusters in expand_from_wildcard("clusters")
for simpl in expand_from_wildcard("simpl", config)
for clusters in expand_from_wildcard("clusters", config)
for l in ll
for opts in expand_from_wildcard("opts")}
for opts in expand_from_wildcard("opts", config)}
dfs = make_summaries(networks_dict, country=snakemake.wildcards.country)
dfs = make_summaries(networks_dict, paths, config, country=wildcards.country)
to_csv(dfs)
to_csv(dfs, out[0])

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Plots map with pie charts and cost box bar charts.
@ -20,12 +20,11 @@ Description
"""
import logging
from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,
configure_logging)
from _helpers import (retrieve_snakemake_keys, load_network_for_plots,
aggregate_p, aggregate_costs, configure_logging)
import pandas as pd
import numpy as np
from six.moves import zip
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
@ -89,36 +88,43 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
# bus_sizes = n.generators_t.p.sum().loc[n.generators.carrier == "load"].groupby(n.generators.bus).sum()
bus_sizes = pd.concat((n.generators.query('carrier != "load"').groupby(['bus', 'carrier']).p_nom_opt.sum(),
n.storage_units.groupby(['bus', 'carrier']).p_nom_opt.sum()))
line_widths_exp = dict(Line=n.lines.s_nom_opt, Link=n.links.p_nom_opt)
line_widths_cur = dict(Line=n.lines.s_nom_min, Link=n.links.p_nom_min)
line_widths_exp = n.lines.s_nom_opt
line_widths_cur = n.lines.s_nom_min
link_widths_exp = n.links.p_nom_opt
link_widths_cur = n.links.p_nom_min
else:
raise AssertionError('plotting of {} has not been implemented yet'.format(attribute))
line_colors_with_alpha = \
dict(Line=(line_widths_cur['Line'] / n.lines.s_nom > 1e-3)
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}),
Link=(line_widths_cur['Link'] / n.links.p_nom > 1e-3)
((line_widths_cur / n.lines.s_nom > 1e-3)
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}))
link_colors_with_alpha = \
((link_widths_cur / n.links.p_nom > 1e-3)
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}))
## FORMAT
linewidth_factor = opts['map'][attribute]['linewidth_factor']
bus_size_factor = opts['map'][attribute]['bus_size_factor']
## PLOT
n.plot(line_widths=pd.concat(line_widths_exp)/linewidth_factor,
line_colors=dict(Line=line_colors['exp'], Link=line_colors['exp']),
n.plot(line_widths=line_widths_exp/linewidth_factor,
link_widths=link_widths_exp/linewidth_factor,
line_colors=line_colors['exp'],
link_colors=line_colors['exp'],
bus_sizes=bus_sizes/bus_size_factor,
bus_colors=tech_colors,
boundaries=map_boundaries,
geomap=True,
color_geomap=True, geomap=True,
ax=ax)
n.plot(line_widths=pd.concat(line_widths_cur)/linewidth_factor,
line_colors=pd.concat(line_colors_with_alpha),
n.plot(line_widths=line_widths_cur/linewidth_factor,
link_widths=link_widths_cur/linewidth_factor,
line_colors=line_colors_with_alpha,
link_colors=link_colors_with_alpha,
bus_sizes=0,
bus_colors=tech_colors,
boundaries=map_boundaries,
geomap=False,
color_geomap=True, geomap=False,
ax=ax)
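# API note (newer PyPSA plotting interface, as used above): n.plot() takes
# separate line_widths/line_colors and link_widths/link_colors keywords, so
# the pd.concat of {Line, Link} dicts from the old code is no longer needed.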
ax.set_aspect('equal')
ax.axis('off')
@ -139,7 +145,7 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
loc="upper left", bbox_to_anchor=(0.24, 1.01),
frameon=False,
labelspacing=0.8, handletextpad=1.5,
title='Transmission Exist./Exp. ')
title='Transmission Exp./Exist. ')
ax.add_artist(l1_1)
handles = []
@ -197,7 +203,7 @@ def plot_total_energy_pie(n, ax=None):
def plot_total_cost_bar(n, ax=None):
if ax is None: ax = plt.gca()
total_load = (n.snapshot_weightings * n.loads_t.p.sum(axis=1)).sum()
total_load = (n.snapshot_weightings.generators * n.loads_t.p.sum(axis=1)).sum()
tech_colors = opts['tech_colors']
def split_costs(n):
@ -253,18 +259,19 @@ if __name__ == "__main__":
set_plot_style()
opts = snakemake.config['plotting']
map_figsize = opts['map']['figsize']
map_boundaries = opts['map']['boundaries']
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config)
map_figsize = config['map']['figsize']
map_boundaries = config['map']['boundaries']
scenario_opts = snakemake.wildcards.opts.split('-')
n = load_network_for_plots(paths.network, paths.tech_costs, config)
scenario_opts = wildcards.opts.split('-')
fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()})
plot_map(n, ax, snakemake.wildcards.attr, opts)
plot_map(n, ax, wildcards.attr, config)
fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight')
fig.savefig(out.only_map, dpi=150, bbox_inches='tight')
ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2])
plot_total_energy_pie(n, ax1)
@ -272,12 +279,12 @@ if __name__ == "__main__":
ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45])
plot_total_cost_bar(n, ax2)
ll = snakemake.wildcards.ll
ll = wildcards.ll
ll_type = ll[0]
ll_factor = ll[1:]
lbl = dict(c='line cost', v='line volume')[ll_type]
amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal'
fig.suptitle('Expansion to {amount} {label} at {clusters} clusters'
.format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters))
.format(amount=amnt, label=lbl, clusters=wildcards.clusters))
fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight')
fig.savefig(out.ext, transparent=True, bbox_inches='tight')

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Plots renewable installation potentials per capacity factor.
@ -19,7 +19,7 @@ Description
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, retrieve_snakemake_keys
import pypsa
import pandas as pd
@ -53,11 +53,13 @@ if __name__ == "__main__":
clusts= '5,full', country= 'all')
configure_logging(snakemake)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
plot_kwds = dict(drawstyle="steps-post")
clusters = snakemake.wildcards.clusts.split(',')
techs = snakemake.wildcards.techs.split(',')
country = snakemake.wildcards.country
clusters = wildcards.clusts.split(',')
techs = wildcards.techs.split(',')
country = wildcards.country
if country == 'all':
country = None
else:
@ -66,7 +68,7 @@ if __name__ == "__main__":
fig, axes = plt.subplots(1, len(techs))
for j, cluster in enumerate(clusters):
net = pypsa.Network(snakemake.input[j])
net = pypsa.Network(paths[j])
for i, tech in enumerate(techs):
cum_p_nom_max(net, tech, country).plot(x="p_max_pu", y="cum_p_nom_max",
@ -79,4 +81,4 @@ if __name__ == "__main__":
plt.legend(title="Cluster level")
fig.savefig(snakemake.output[0], transparent=True, bbox_inches='tight')
fig.savefig(out[0], transparent=True, bbox_inches='tight')

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Plots energy and cost summaries for solved networks.
@ -21,7 +21,7 @@ Description
import os
import logging
from _helpers import configure_logging
from _helpers import configure_logging, retrieve_snakemake_keys
import pandas as pd
import matplotlib.pyplot as plt
@ -55,7 +55,7 @@ def rename_techs(label):
preferred_order = pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","OCGT","hydrogen storage","battery storage"])
def plot_costs(infn, fn=None):
def plot_costs(infn, config, fn=None):
## For now ignore the simpl header
cost_df = pd.read_csv(infn,index_col=list(range(3)),header=[1,2,3])
@ -67,7 +67,7 @@ def plot_costs(infn, fn=None):
df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[df.max(axis=1) < snakemake.config['plotting']['costs_threshold']]
to_drop = df.index[df.max(axis=1) < config['plotting']['costs_threshold']]
print("dropping")
@ -84,7 +84,7 @@ def plot_costs(infn, fn=None):
fig, ax = plt.subplots()
fig.set_size_inches((12,8))
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index])
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index])
handles,labels = ax.get_legend_handles_labels()
@ -92,7 +92,7 @@ def plot_costs(infn, fn=None):
handles.reverse()
labels.reverse()
ax.set_ylim([0,snakemake.config['plotting']['costs_max']])
ax.set_ylim([0,config['plotting']['costs_max']])
ax.set_ylabel("System Cost [EUR billion per year]")
@ -109,7 +109,7 @@ def plot_costs(infn, fn=None):
fig.savefig(fn, transparent=True)
def plot_energy(infn, fn=None):
def plot_energy(infn, config, fn=None):
energy_df = pd.read_csv(infn, index_col=list(range(2)),header=[1,2,3])
@ -120,7 +120,7 @@ def plot_energy(infn, fn=None):
df = df.groupby(df.index.map(rename_techs)).sum()
to_drop = df.index[df.abs().max(axis=1) < snakemake.config['plotting']['energy_threshold']]
to_drop = df.index[df.abs().max(axis=1) < config['plotting']['energy_threshold']]
print("dropping")
@ -137,7 +137,7 @@ def plot_energy(infn, fn=None):
fig, ax = plt.subplots()
fig.set_size_inches((12,8))
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index])
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index])
handles,labels = ax.get_legend_handles_labels()
@ -145,7 +145,7 @@ def plot_energy(infn, fn=None):
handles.reverse()
labels.reverse()
ax.set_ylim([snakemake.config['plotting']['energy_min'],snakemake.config['plotting']['energy_max']])
ax.set_ylim([config['plotting']['energy_min'], config['plotting']['energy_max']])
ax.set_ylabel("Energy [TWh/a]")
@ -170,10 +170,12 @@ if __name__ == "__main__":
attr='', ext='png', country='all')
configure_logging(snakemake)
summary = snakemake.wildcards.summary
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
summary = wildcards.summary
try:
func = globals()[f"plot_{summary}"]
except KeyError:
raise RuntimeError(f"plotting function for {summary} has not been defined")
func(os.path.join(snakemake.input[0], f"{summary}.csv"), snakemake.output[0])
func(os.path.join(paths[0], f"{summary}.csv"), config, out[0])

View File

@ -2,7 +2,7 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Extracts capacities of HVDC links from `Wikipedia <https://en.wikipedia.org/wiki/List_of_HVDC_projects>`_.
@ -37,7 +37,7 @@ Description
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, retrieve_snakemake_keys
import pandas as pd
@ -63,6 +63,8 @@ if __name__ == "__main__":
snakemake = mock_snakemake('prepare_links_p_nom', simpl='', network='elec')
configure_logging(snakemake)
paths, config, wildcards, logs, out = retrieve_snakemake_keys(snakemake)
links_p_nom = pd.read_html('https://en.wikipedia.org/wiki/List_of_HVDC_projects', header=0, match="SwePol")[0]
mw = "Power (MW)"
@ -74,4 +76,4 @@ if __name__ == "__main__":
links_p_nom['x1'], links_p_nom['y1'] = extract_coordinates(links_p_nom['Converterstation 1'])
links_p_nom['x2'], links_p_nom['y2'] = extract_coordinates(links_p_nom['Converterstation 2'])
links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(snakemake.output[0], index=False)
links_p_nom.dropna(subset=['x1', 'y1', 'x2', 'y2']).to_csv(out[0], index=False)

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -38,12 +38,12 @@ Inputs
------
- ``data/costs.csv``: The database of cost assumptions for all included technologies for specific years from various sources; e.g. discount rate, lifetime, investment (CAPEX), fixed operation and maintenance (FOM), variable operation and maintenance (VOM), fuel costs, efficiency, carbon-dioxide intensity.
- ``networks/elec{year}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
- ``networks/elec{weather_year}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
Outputs
-------
- ``networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
- ``networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Complete PyPSA network that will be handed to the ``solve_network`` rule.
Description
-----------
@ -62,7 +62,6 @@ import re
import pypsa
import numpy as np
import pandas as pd
from six import iteritems
from add_electricity import load_costs, update_transmission_costs
@ -71,21 +70,14 @@ idx = pd.IndexSlice
logger = logging.getLogger(__name__)
def add_co2limit(n, Nyears=1., factor=None):
if factor is not None:
annual_emissions = factor*snakemake.config['electricity']['co2base']
else:
annual_emissions = snakemake.config['electricity']['co2limit']
def add_co2limit(n, co2limit, Nyears=1.):
n.add("GlobalConstraint", "CO2Limit",
carrier_attribute="co2_emissions", sense="<=",
constant=annual_emissions * Nyears)
constant=co2limit * Nyears)
def add_emission_prices(n, emission_prices=None, exclude_co2=False):
if emission_prices is None:
emission_prices = snakemake.config['costs']['emission_prices']
def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False):
if exclude_co2: emission_prices.pop('co2')
ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') *
n.carriers.filter(like='_emissions')).sum(axis=1)
@ -95,13 +87,12 @@ def add_emission_prices(n, emission_prices=None, exclude_co2=False):
n.storage_units['marginal_cost'] += su_ep
def set_line_s_max_pu(n):
s_max_pu = snakemake.config['lines']['s_max_pu']
def set_line_s_max_pu(n, s_max_pu=0.7):
n.lines['s_max_pu'] = s_max_pu
logger.info(f"N-1 security margin of lines set to {s_max_pu}")
def set_transmission_limit(n, ll_type, factor, Nyears=1):
def set_transmission_limit(n, ll_type, factor, costs, Nyears=1):
links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series()
_lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) *
@ -113,9 +104,6 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1):
ref = (lines_s_nom @ n.lines[col] +
n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col])
costs = load_costs(Nyears, snakemake.input.tech_costs,
snakemake.config['costs'],
snakemake.config['electricity'])
update_transmission_costs(n, costs, simple_hvdc_costs=False)
if factor == 'opt' or float(factor) > 1.0:
@ -145,13 +133,14 @@ def average_every_nhours(n, offset):
for c in n.iterate_components():
pnl = getattr(m, c.list_name+"_t")
for k, df in iteritems(c.pnl):
for k, df in c.pnl.items():
if not df.empty:
pnl[k] = df.resample(offset).mean()
return m
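The resampling step itself is plain pandas; a synthetic sketch of what average_every_nhours does to each non-empty time-varying DataFrame of a component:

import pandas as pd
import numpy as np

# Hourly series averaged into non-overlapping 3-hour blocks.
idx = pd.date_range("2013-03-01", periods=12, freq="H")
df = pd.DataFrame({"load": np.arange(12.0)}, index=idx)
print(df.resample("3H").mean())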
def apply_time_segmentation(n, segments):
def apply_time_segmentation(n, segments, solver_name="cbc"):
logger.info(f"Aggregating time series to {segments} segments.")
try:
import tsam.timeseriesaggregation as tsam
@ -170,8 +159,6 @@ def apply_time_segmentation(n, segments):
raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False)
solver_name = snakemake.config["solving"]["solver"]["name"]
agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw),
noTypicalPeriods=1, noSegments=int(segments),
segmentation=True, solver=solver_name)
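For reference, a minimal tsam segmentation run mirroring the call above (synthetic data; assumes the tsam package and a cbc solver are installed):

import pandas as pd
import numpy as np
import tsam.timeseriesaggregation as tsam

raw = pd.DataFrame(np.random.rand(168, 3),
                   index=pd.date_range("2013-03-01", periods=168, freq="H"))
agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw),
                                 noTypicalPeriods=1, noSegments=24,
                                 segmentation=True, solver="cbc")
segmented = agg.createTypicalPeriods()  # 24 variable-length segments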
@ -208,9 +195,7 @@ def enforce_autarky(n, only_crossborder=False):
n.mremove("Line", lines_rm)
n.mremove("Link", links_rm)
def set_line_nom_max(n):
s_nom_max_set = snakemake.config["lines"].get("s_nom_max,", np.inf)
p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf)
def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf):
n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True)
n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True)
@ -224,9 +209,10 @@ if __name__ == "__main__":
opts = snakemake.wildcards.opts.split('-')
n = pypsa.Network(snakemake.input[0])
Nyears = n.snapshot_weightings.sum() / 8760.
Nyears = n.snapshot_weightings.objective.sum() / 8760.
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
set_line_s_max_pu(n)
set_line_s_max_pu(n, snakemake.config['lines']['s_max_pu'])
for o in opts:
m = re.match(r'^\d+h$', o, re.IGNORECASE)
@ -237,16 +223,18 @@ if __name__ == "__main__":
for o in opts:
m = re.match(r'^\d+seg$', o, re.IGNORECASE)
if m is not None:
n = apply_time_segmentation(n, m.group(0)[:-3])
solver_name = snakemake.config["solving"]["solver"]["name"]
n = apply_time_segmentation(n, m.group(0)[:-3], solver_name)
break
for o in opts:
if "Co2L" in o:
m = re.findall(r"[0-9]*\.?[0-9]+$", o)
if len(m) > 0:
add_co2limit(n, Nyears, float(m[0]))
co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
add_co2limit(n, co2limit, Nyears)
else:
add_co2limit(n, Nyears)
add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
break
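The wildcard parsing can be checked in isolation; a short sketch of how an option string such as Co2L0.05 resolves to an absolute cap (the base emissions value here is a placeholder for config['electricity']['co2base']):

import re

co2base = 1.487e9  # placeholder base annual emissions in tCO2
for o in ["Co2L0.05", "Co2L"]:
    m = re.findall(r"[0-9]*\.?[0-9]+$", o)
    cap = float(m[0]) * co2base if m else None  # None -> fall back to co2limit
    print(o, "->", cap)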
for o in opts:
@ -267,12 +255,13 @@ if __name__ == "__main__":
c.df.loc[sel,attr] *= factor
if 'Ep' in opts:
add_emission_prices(n)
add_emission_prices(n, snakemake.config['costs']['emission_prices'])
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
set_transmission_limit(n, ll_type, factor, Nyears)
set_transmission_limit(n, ll_type, factor, costs, Nyears)
set_line_nom_max(n)
set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max", np.inf),
p_nom_max_set=snakemake.config["links"].get("p_nom_max", np.inf))
if "ATK" in opts:
enforce_autarky(n)

View File

@ -1,75 +0,0 @@
# SPDX-FileCopyrightText: 2019-2020 Fabian Hofmann (FIAS)
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517949.svg
:target: https://doi.org/10.5281/zenodo.3517949
Cutouts are spatiotemporal subsets of the European weather data from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ reanalysis dataset and the `CMSAF SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_ solar surface radiation dataset for the year 2013 (3.9 GB).
They have been prepared by and are for use with the `atlite <https://github.com/PyPSA/atlite>`_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo <https://doi.org/10.5281/zenodo.3517949>`_ through the rule ``retrieve_cutout`` described here.
.. note::
To download cutouts yourself from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ you need to `set up the CDS API <https://cds.climate.copernicus.eu/api-how-to>`_.
The :ref:`tutorial` uses smaller `cutouts <https://zenodo.org/record/3518020/files/pypsa-eur-tutorial-cutouts.tar.xz>`_ than required for the full model (19 MB)
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3518020.svg
:target: https://doi.org/10.5281/zenodo.3518020
**Relevant Settings**
.. code:: yaml
tutorial:
enable:
build_cutout:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
:ref:`toplevel_cf`
**Outputs**
- ``cutouts/{cutout}``: weather data from either the `ERA5 <https://www.ecmwf.int/en/forecasts/datasets/reanalysis-datasets/era5>`_ reanalysis weather dataset or `SARAH-2 <https://wui.cmsaf.eu/safira/action/viewProduktSearch>`_ satellite-based historic weather data.
.. seealso::
For details see :mod:`build_cutout` and read the `atlite documentation <https://atlite.readthedocs.io>`_.
"""
import logging
logger = logging.getLogger(__name__)
from pathlib import Path
import tarfile
from _helpers import progress_retrieve, configure_logging
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake('retrieve_cutout')
rootpath = '..'
else:
rootpath = '.'
configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)
if snakemake.config['tutorial']:
url = "https://zenodo.org/record/3518020/files/pypsa-eur-tutorial-cutouts.tar.xz"
else:
url = "https://zenodo.org/record/3517949/files/pypsa-eur-cutouts.tar.xz"
# Save location
tarball_fn = Path(f"{rootpath}/cutouts.tar.xz")
logger.info(f"Downloading cutouts from '{url}'.")
progress_retrieve(url, tarball_fn)
logger.info(f"Extracting cutouts.")
tarfile.open(tarball_fn).extractall(path=rootpath)
tarball_fn.unlink()
logger.info(f"Cutouts available in '{Path(tarball_fn.stem).stem}'.")

View File

@ -1,7 +1,7 @@
# Copyright 2019-2020 Fabian Hofmann (FIAS)
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517935.svg

View File

@ -1,49 +0,0 @@
# Copyright 2019-2020 Fabian Hofmann (FIAS)
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
"""
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3518215.svg
:target: https://doi.org/10.5281/zenodo.3518215
This rule, as a substitute for :mod:`build_natura_raster`, downloads an already rasterized version (`natura.tiff <https://zenodo.org/record/3518215/files/natura.tiff>`_) of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times. The file is placed into the ``resources`` sub-directory.
**Relevant Settings**
.. code:: yaml
enable:
build_natura_raster:
.. seealso::
Documentation of the configuration file ``config.yaml`` at
:ref:`toplevel_cf`
**Outputs**
- ``resources/natura.tiff``: Rasterized version of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times.
.. seealso::
For details see :mod:`build_natura_raster`.
"""
import logging
from _helpers import progress_retrieve, configure_logging
logger = logging.getLogger(__name__)
if __name__ == "__main__":
if 'snakemake' not in globals():
from _helpers import mock_snakemake
snakemake = mock_snakemake('retrieve_natura_raster')
configure_logging(snakemake) # TODO Make logging compatible with progressbar (see PR #102)
url = "https://zenodo.org/record/3518215/files/natura.tiff"
logger.info(f"Downloading natura raster from '{url}'.")
progress_retrieve(url, snakemake.output[0])
logger.info(f"Natura raster available as '{snakemake.output[0]}'.")

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
# coding: utf-8
"""
@ -52,18 +52,18 @@ Inputs
Outputs
-------
- ``resources/regions_onshore_elec{year}_s{simpl}.geojson``:
- ``resources/regions_onshore_elec{weather_year}_s{simpl}.geojson``:
.. image:: ../img/regions_onshore_elec_s.png
:scale: 33 %
- ``resources/regions_offshore_elec{year}_s{simpl}.geojson``:
- ``resources/regions_offshore_elec{weather_year}_s{simpl}.geojson``:
.. image:: ../img/regions_offshore_elec_s.png
:scale: 33 %
- ``resources/busmap_elec{year}_s{simpl}.h5``: Mapping of buses from ``networks/elec.nc`` to ``networks/elec{year}_s{simpl}.nc``; has keys ['/busmap_s']
- ``networks/elec{year}_s{simpl}.nc``:
- ``resources/busmap_elec{weather_year}_s{simpl}.h5``: Mapping of buses from ``networks/elec.nc`` to ``networks/elec{weather_year}_s{simpl}.nc``; has keys ['/busmap_s']
- ``networks/elec{weather_year}_s{simpl}.nc``:
.. image:: ../img/elec_s.png
:scale: 33 %
@ -83,7 +83,7 @@ The rule :mod:`simplify_network` does up to four things:
"""
import logging
from _helpers import configure_logging
from _helpers import configure_logging, update_p_nom_max
from cluster_network import clustering_for_n_clusters, cluster_regions
from add_electricity import load_costs
@ -93,12 +93,11 @@ import numpy as np
import scipy as sp
from scipy.sparse.csgraph import connected_components, dijkstra
from six import iteritems
from six.moves import reduce
from functools import reduce
import pypsa
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport
from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense
logger = logging.getLogger(__name__)
@ -125,7 +124,8 @@ def simplify_network_to_380(n):
several_trafo_b = trafo_map.isin(trafo_map.index)
trafo_map.loc[several_trafo_b] = trafo_map.loc[several_trafo_b].map(trafo_map)
missing_buses_i = n.buses.index.difference(trafo_map.index)
trafo_map = trafo_map.append(pd.Series(missing_buses_i, missing_buses_i))
missing = pd.Series(missing_buses_i, missing_buses_i)
trafo_map = pd.concat([trafo_map, missing])
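This swap anticipates the removal of Series.append (deprecated in pandas 1.4, removed in 2.0); both forms are equivalent here, e.g.:

import pandas as pd

trafo_map = pd.Series({"B1": "B0"})
missing = pd.Series(["B2", "B3"], index=["B2", "B3"])  # identity entries
# old: trafo_map.append(missing)  (removed in pandas 2.0)
trafo_map = pd.concat([trafo_map, missing])
print(trafo_map)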
for c in n.one_port_components|n.branch_components:
df = n.df(c)
@ -139,18 +139,15 @@ def simplify_network_to_380(n):
return n, trafo_map
def _prepare_connection_costs_per_link(n):
def _prepare_connection_costs_per_link(n, costs, config):
if n.links.empty: return {}
costs = load_costs(n.snapshot_weightings.sum() / 8760, snakemake.input.tech_costs,
snakemake.config['costs'], snakemake.config['electricity'])
connection_costs_per_link = {}
for tech in snakemake.config['renewable']:
for tech in config['renewable']:
if tech.startswith('offwind'):
connection_costs_per_link[tech] = (
n.links.length * snakemake.config['lines']['length_factor'] *
n.links.length * config['lines']['length_factor'] *
(n.links.underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] +
(1. - n.links.underwater_fraction) * costs.at[tech + '-connection-underground', 'capital_cost'])
)
@ -158,9 +155,9 @@ def _prepare_connection_costs_per_link(n):
return connection_costs_per_link
def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None):
def _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link=None, buses=None):
if connection_costs_per_link is None:
connection_costs_per_link = _prepare_connection_costs_per_link(n)
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
if buses is None:
buses = busmap.index[busmap.index != busmap.values]
@ -178,7 +175,8 @@ def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None,
return connection_costs_to_bus
def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output):
connection_costs = {}
for tech in connection_costs_to_bus:
tech_b = n.generators.carrier == tech
costs = n.generators.loc[tech_b, "bus"].map(connection_costs_to_bus[tech]).loc[lambda s: s>0]
@ -186,20 +184,23 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
n.generators.loc[costs.index, "capital_cost"] += costs
logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} "
.format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems())))
connection_costs[tech] = costs
pd.DataFrame(connection_costs).to_csv(output.connection_costs)
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}):
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}):
def replace_components(n, c, df, pnl):
n.mremove(c, n.df(c).index)
import_components_from_dataframe(n, df, c)
for attr, df in iteritems(pnl):
for attr, df in pnl.items():
if not df.empty:
import_series_from_dataframe(n, df, c, attr)
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus)
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)
generators, generators_pnl = aggregategenerators(n, busmap)
generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum})
replace_components(n, "Generator", generators, generators_pnl)
for one_port in aggregate_one_ports:
@ -213,7 +214,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
def simplify_links(n):
def simplify_links(n, costs, config, output):
## Complex multi-node links are folded into end-points
logger.info("Simplifying connected link components")
@ -237,7 +238,7 @@ def simplify_links(n):
if len(G.adj[m]) > 2 or (set(G.adj[m]) - nodes)}
for u in supernodes:
for m, ls in iteritems(G.adj[u]):
for m, ls in G.adj[u].items():
if m not in nodes or m in seen: continue
buses = [u, m]
@ -245,7 +246,7 @@ def simplify_links(n):
while m not in (supernodes | seen):
seen.add(m)
for m2, ls in iteritems(G.adj[m]):
for m2, ls in G.adj[m].items():
if m2 in seen or m2 == u: continue
buses.append(m2)
links.append(list(ls)) # [name for name in ls])
@ -260,7 +261,7 @@ def simplify_links(n):
busmap = n.buses.index.to_series()
connection_costs_per_link = _prepare_connection_costs_per_link(n)
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link))
for lbl in labels.value_counts().loc[lambda s: s > 2].index:
@ -274,11 +275,11 @@ def simplify_links(n):
m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']],
n.buses.loc[buses[1:-1], ['x', 'y']])
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses)
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link, buses)
all_links = [i for _, i in sum(links, [])]
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
p_max_pu = config['links'].get('p_max_pu', 1.)
lengths = n.links.loc[all_links, 'length']
name = lengths.idxmax() + '+{}'.format(len(links) - 1)
params = dict(
@ -305,39 +306,77 @@ def simplify_links(n):
logger.debug("Collecting all components using the busmap")
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
return n, busmap
def remove_stubs(n):
def remove_stubs(n, costs, config, output):
logger.info("Removing stubs")
busmap = busmap_by_stubs(n) # ['country'])
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap)
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
return n, busmap
def aggregate_to_substations(n, buses_i=None):
# can be used to aggregate a selection of buses to electrically closest neighbors
# if no buses are given, nodes that are no substations or without offshore connection are aggregated
if buses_i is None:
logger.info("Aggregating buses that are no substations or have no valid offshore connection")
buses_i = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus))
def cluster(n, n_clusters):
weight = pd.concat({'Line': n.lines.length/n.lines.s_nom.clip(1e-3),
'Link': n.links.length/n.links.p_nom.clip(1e-3)})
adj = n.adjacency_matrix(branch_components=['Line', 'Link'], weights=weight)
bus_indexer = n.buses.index.get_indexer(buses_i)
dist = pd.DataFrame(dijkstra(adj, directed=False, indices=bus_indexer), buses_i, n.buses.index)
dist[buses_i] = np.inf # bus in buses_i should not be assigned to different bus in buses_i
for c in n.buses.country.unique():
incountry_b = n.buses.country == c
dist.loc[incountry_b, ~incountry_b] = np.inf
busmap = n.buses.index.to_series()
busmap.loc[buses_i] = dist.idxmin(1)
clustering = get_clustering_from_busmap(n, busmap,
bus_strategies=dict(country=_make_consense("Bus", "country")),
aggregate_generators_weighted=True,
aggregate_generators_carriers=None,
aggregate_one_ports=["Load", "StorageUnit"],
line_length_factor=1.0,
generator_strategies={'p_nom_max': 'sum'},
scale_link_capital_costs=False)
return clustering.network, busmap
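The electrical-distance assignment above can be illustrated on a toy three-bus graph; a sketch using scipy's dijkstra on a weighted adjacency matrix, with weights mimicking length/s_nom of the connecting branches:

import numpy as np
import pandas as pd
from scipy.sparse import csr_matrix
from scipy.sparse.csgraph import dijkstra

# Buses 0 and 2 are kept; bus 1 is mapped to its electrically closest bus.
adj = csr_matrix(np.array([[0., 2., 0.],
                           [2., 0., 1.],
                           [0., 1., 0.]]))
dist = pd.DataFrame(dijkstra(adj, directed=False, indices=[1]),
                    index=["bus1"], columns=["bus0", "bus1", "bus2"])
dist["bus1"] = np.inf  # a candidate bus must not be assigned to itself
print(dist.idxmin(axis=1))  # bus1 -> bus2 (weight 1 < 2)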
def cluster(n, n_clusters, config):
logger.info(f"Clustering to {n_clusters} buses")
focus_weights = config.get('focus_weights', None)
renewable_carriers = pd.Index([tech
for tech in n.generators.carrier.unique()
if tech.split('-', 2)[0] in snakemake.config['renewable']])
if tech.split('-', 2)[0] in config['renewable']])
def consense(x):
v = x.iat[0]
assert ((x == v).all() or x.isnull().all()), (
"The `potential` configuration option must agree for all renewable carriers, for now!"
)
return v
potential_mode = (consense(pd.Series([snakemake.config['renewable'][tech]['potential']
potential_mode = (consense(pd.Series([config['renewable'][tech]['potential']
for tech in renewable_carriers]))
if len(renewable_carriers) > 0 else 'conservative')
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
solver_name=snakemake.config['solving']['solver']['name'])
solver_name=config['solving']['solver']['name'],
focus_weights=focus_weights)
return clustering.network, clustering.busmap
@ -352,16 +391,31 @@ if __name__ == "__main__":
n, trafo_map = simplify_network_to_380(n)
n, simplify_links_map = simplify_links(n)
Nyears = n.snapshot_weightings.objective.sum() / 8760
n, stub_map = remove_stubs(n)
technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output)
n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output)
busmaps = [trafo_map, simplify_links_map, stub_map]
if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
n, substation_map = aggregate_to_substations(n)
busmaps.append(substation_map)
if snakemake.wildcards.simpl:
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl))
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl), snakemake.config)
busmaps.append(cluster_map)
# Some entries in n.buses are not updated by the previous functions and can therefore be wrong. As they are
# not needed and are lost when clustering anyway (e.g. with the simpl wildcard), remove them for consistency:
buses_c = {'symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'}.intersection(n.buses.columns)
n.buses = n.buses.drop(buses_c, axis=1)
update_p_nom_max(n)
n.export_to_netcdf(snakemake.output.network)
busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
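The reduce composes the successive busmaps into a single original-bus-to-final-bus mapping; the same pattern on toy Series:

from functools import reduce
import pandas as pd

m1 = pd.Series({"A": "B", "B": "B"})  # 380 kV mapping
m2 = pd.Series({"B": "C"})            # link simplification
m3 = pd.Series({"C": "C"})            # stub removal
busmap = reduce(lambda x, y: x.map(y), [m2, m3], m1)
print(busmap)  # A -> C, B -> C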

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Solves linear optimal power flow for a network iteratively while updating reactances.
@ -32,12 +32,12 @@ Relevant Settings
Inputs
------
- ``networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`
- ``networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`prepare`
Outputs
-------
- ``results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results
- ``results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: Solved PyPSA network including optimisation results
.. image:: ../img/results.png
:scale: 40 %
@ -101,8 +101,9 @@ def prepare_network(n, solve_opts):
if solve_opts.get('load_shedding'):
n.add("Carrier", "Load")
n.madd("Generator", n.buses.index, " load",
bus=n.buses.index,
buses_i = n.buses.query("carrier == 'AC'").index
n.madd("Generator", buses_i, " load",
bus=buses_i,
carrier='load',
sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW
marginal_cost=1e2, # Eur/kWh
@ -127,7 +128,7 @@ def prepare_network(n, solve_opts):
if solve_opts.get('nhours'):
nhours = solve_opts['nhours']
n.set_snapshots(n.snapshots[:nhours])
n.snapshot_weightings[:] = 8760./nhours
n.snapshot_weightings[:] = 8760. / nhours
return n
@ -174,16 +175,16 @@ def add_EQ_constraints(n, o, scaling=1e-1):
ggrouper = n.generators.bus
lgrouper = n.loads.bus
sgrouper = n.storage_units.bus
load = n.snapshot_weightings @ \
load = n.snapshot_weightings.generators @ \
n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
inflow = n.snapshot_weightings @ \
inflow = n.snapshot_weightings.stores @ \
n.storage_units_t.inflow.groupby(sgrouper, axis=1).sum()
inflow = inflow.reindex(load.index).fillna(0.)
rhs = scaling * ( level * load - inflow )
lhs_gen = linexpr((n.snapshot_weightings * scaling,
lhs_gen = linexpr((n.snapshot_weightings.generators * scaling,
get_var(n, "Generator", "p").T)
).T.groupby(ggrouper, axis=1).apply(join_exprs)
lhs_spill = linexpr((-n.snapshot_weightings * scaling,
lhs_spill = linexpr((-n.snapshot_weightings.stores * scaling,
get_var(n, "StorageUnit", "spill").T)
).T.groupby(sgrouper, axis=1).apply(join_exprs)
lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
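These changes track PyPSA's move (since v0.18) from a single snapshot_weightings Series to a DataFrame with separate objective, generators and stores columns; a quick way to inspect the new structure (requires pypsa):

import pypsa

n = pypsa.Network()
n.set_snapshots(range(3))
# One weighting column per purpose, each defaulting to 1.0 per snapshot.
print(n.snapshot_weightings)             # columns: objective, generators, stores
print(n.snapshot_weightings.generators)  # weights generator dispatch terms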
@ -241,7 +242,7 @@ def extra_functionality(n, snapshots):
add_battery_constraints(n)
def solve_network(n, config, solver_log=None, opts='', **kwargs):
def solve_network(n, config, opts='', **kwargs):
solver_options = config['solving']['solver'].copy()
solver_name = solver_options.pop('name')
cf_solving = config['solving']['options']
@ -282,8 +283,8 @@ if __name__ == "__main__":
with memory_logger(filename=fn, interval=30.) as mem:
n = pypsa.Network(snakemake.input[0])
n = prepare_network(n, solve_opts)
n = solve_network(n, config=snakemake.config, solver_dir=tmpdir,
solver_log=snakemake.log.solver, opts=opts)
n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
solver_logfile=snakemake.log.solver)
n.export_to_netcdf(snakemake.output[0])
logger.info("Maximum memory usage: {}".format(mem.mem_usage))

View File

@ -1,6 +1,6 @@
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
#
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-License-Identifier: MIT
"""
Solves linear optimal dispatch in hourly resolution
@ -32,13 +32,13 @@ Relevant Settings
Inputs
------
- ``networks/elec{year}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
- ``results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`solve`
- ``networks/elec{weather_year}_s{simpl}_{clusters}.nc``: confer :ref:`cluster`
- ``results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc``: confer :ref:`solve`
Outputs
-------
- ``results/networks/elec{year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc``: Solved PyPSA network for optimal dispatch including optimisation results
- ``results/networks/elec{weather_year}_s{simpl}_{clusters}_ec_l{ll}_{opts}_op.nc``: Solved PyPSA network for optimal dispatch including optimisation results
Description
-----------
@ -71,7 +71,7 @@ def set_parameters_from_optimized(n, n_optim):
n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
n.lines['s_nom_extendable'] = False
links_dc_i = n.links.index[n.links.carrier == 'DC']
links_dc_i = n.links.index[n.links.p_nom_extendable]
n.links.loc[links_dc_i, 'p_nom'] = \
n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
n.links.loc[links_dc_i, 'p_nom_extendable'] = False
@ -81,10 +81,15 @@ def set_parameters_from_optimized(n, n_optim):
n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.)
n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False
stor_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
n.storage_units.loc[stor_extend_i, 'p_nom'] = \
n_optim.storage_units['p_nom_opt'].reindex(stor_extend_i, fill_value=0.)
n.storage_units.loc[stor_extend_i, 'p_nom_extendable'] = False
stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
n.storage_units.loc[stor_units_extend_i, 'p_nom'] = \
n_optim.storage_units['p_nom_opt'].reindex(stor_units_extend_i, fill_value=0.)
n.storage_units.loc[stor_units_extend_i, 'p_nom_extendable'] = False
stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
n.stores.loc[stor_extend_i, 'e_nom'] = \
n_optim.stores['e_nom_opt'].reindex(stor_extend_i, fill_value=0.)
n.stores.loc[stor_extend_i, 'e_nom_extendable'] = False
return n
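The fix-and-freeze pattern generalises to all extendable components: optimal capacities are copied back and extendability is switched off, so the operations network re-dispatches at fixed capacity. The same pattern on toy frames:

import pandas as pd

stores = pd.DataFrame({"e_nom": [0., 0.], "e_nom_extendable": [True, False]},
                      index=["S1", "S2"])
optim = pd.DataFrame({"e_nom_opt": [42.]}, index=["S1"])

ext_i = stores.index[stores.e_nom_extendable]
stores.loc[ext_i, "e_nom"] = optim["e_nom_opt"].reindex(ext_i, fill_value=0.)
stores.loc[ext_i, "e_nom_extendable"] = False
print(stores)  # S1 fixed at 42.0, no longer extendable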
@ -104,15 +109,14 @@ if __name__ == "__main__":
n = set_parameters_from_optimized(n, n_optim)
del n_optim
config = snakemake.config
opts = snakemake.wildcards.opts.split('-')
config['solving']['options']['skip_iterations'] = False
snakemake.config['solving']['options']['skip_iterations'] = False
fn = getattr(snakemake.log, 'memory', None)
with memory_logger(filename=fn, interval=30.) as mem:
n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
n = solve_network(n, config, solver_dir=tmpdir,
solver_log=snakemake.log.solver, opts=opts)
n = prepare_network(n, snakemake.config['solving']['options'])
n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
solver_logfile=snakemake.log.solver)
n.export_to_netcdf(snakemake.output[0])
logger.info("Maximum memory usage: {}".format(mem.mem_usage))

View File

@ -2,7 +2,7 @@
#
# SPDX-License-Identifier: CC0-1.0
version: 0.3.0
version: 0.4.0
tutorial: true
logging:
level: INFO
@ -19,9 +19,13 @@ scenario:
countries: ['DE']
clustering:
simplify:
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
snapshots:
start: "2013-03-01"
end: "2014-04-01"
end: "2013-03-08"
closed: 'left' # end is not inclusive
enable:
@ -54,16 +58,15 @@ electricity:
atlite:
nprocesses: 4
cutouts:
europe-2013-era5:
europe-2013-era5-tutorial:
module: era5
xs: [4., 15.]
ys: [56., 46.]
months: [3, 3]
years: [2013, 2013]
x: [4., 15.]
y: [46., 56.]
time: ["2013-03-01", "2013-03-08"]
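The new keys follow the atlite v0.2 API, where a cutout is defined by module, coordinate slices and a time range; a hedged sketch of the equivalent call in Python (path and extents taken from this tutorial config; ERA5 downloads additionally require CDS API credentials):

import atlite

# Programmatic equivalent of the cutout section above under atlite >= 0.2;
# prepare() downloads and derives the required weather fields.
cutout = atlite.Cutout("cutouts/europe-2013-era5-tutorial.nc",
                       module="era5",
                       x=slice(4., 15.), y=slice(46., 56.),
                       time=slice("2013-03-01", "2013-03-08"))
cutout.prepare()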
renewable:
onwind:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: wind
turbine: Vestas_V112_3MW
@ -80,7 +83,7 @@ renewable:
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-ac:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
@ -92,7 +95,7 @@ renewable:
potential: simple # or conservative
clip_p_max_pu: 1.e-2
offwind-dc:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: wind
turbine: NREL_ReferenceTurbine_5MW_offshore
@ -105,7 +108,7 @@ renewable:
potential: simple # or conservative
clip_p_max_pu: 1.e-2
solar:
cutout: europe-2013-era5
cutout: europe-2013-era5-tutorial
resource:
method: pv
panel: CSi
@ -147,7 +150,6 @@ transformers:
type: ''
load:
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
interpolate_limit: 3 # data gaps up to this size are interpolated linearly
time_shift_for_large_gaps: 1w # data gaps up to this size are filled by copying from a time-shifted period
@ -194,8 +196,8 @@ solving:
# threads: 4
# lpmethod: 4 # barrier
# solutiontype: 2 # non basic solution, ie no crossover
# barrier_convergetol: 1.e-5
# feasopt_tolerance: 1.e-6
# barrier.convergetol: 1.e-5
# feasopt.tolerance: 1.e-6
plotting:
map: