Merge branch 'master' into technology-data
This commit is contained in:
commit
520f7e440e
99
.github/workflows/ci.yaml
vendored
Normal file
99
.github/workflows/ci.yaml
vendored
Normal file
@ -0,0 +1,99 @@
|
||||
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
name: CI
|
||||
|
||||
# Caching method based on and described by:
|
||||
# epassaro (2021): https://dev.to/epassaro/caching-anaconda-environments-in-github-actions-5hde
|
||||
# and code in GitHub repo: https://github.com/epassaro/cache-conda-envs
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
pull_request:
|
||||
schedule:
|
||||
- cron: "0 5 * * TUE"
|
||||
|
||||
env:
|
||||
CONDA_CACHE_NUMBER: 1 # Change this value to manually reset the environment cache
|
||||
DATA_CACHE_NUMBER: 1
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
# Matrix required to handle caching with Mambaforge
|
||||
- os: ubuntu-latest
|
||||
label: ubuntu-latest
|
||||
prefix: /usr/share/miniconda3/envs/pypsa-eur
|
||||
|
||||
- os: macos-latest
|
||||
label: macos-latest
|
||||
prefix: /Users/runner/miniconda3/envs/pypsa-eur
|
||||
|
||||
- os: windows-latest
|
||||
label: windows-latest
|
||||
prefix: C:\Miniconda3\envs\pypsa-eur
|
||||
|
||||
name: ${{ matrix.label }}
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash -l {0}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Setup secrets
|
||||
run: |
|
||||
echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc
|
||||
|
||||
- name: Add solver to environment
|
||||
run: |
|
||||
echo -e " - glpk\n - ipopt<3.13.3" >> envs/environment.yaml
|
||||
|
||||
- name: Setup Mambaforge
|
||||
uses: conda-incubator/setup-miniconda@v2
|
||||
with:
|
||||
miniforge-variant: Mambaforge
|
||||
miniforge-version: latest
|
||||
activate-environment: pypsa-eur
|
||||
use-mamba: true
|
||||
|
||||
- name: Set cache dates
|
||||
run: |
|
||||
echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
|
||||
echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV
|
||||
|
||||
- name: Cache data and cutouts folders
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
data
|
||||
cutouts
|
||||
key: data-cutouts-${{ env.WEEK }}-${{ env.DATA_CACHE_NUMBER }}
|
||||
|
||||
- name: Create environment cache
|
||||
uses: actions/cache@v2
|
||||
id: cache
|
||||
with:
|
||||
path: ${{ matrix.prefix }}
|
||||
key: ${{ matrix.label }}-conda-${{ hashFiles('envs/environment.yaml') }}-${{ env.DATE }}-${{ env.CONDA_CACHE_NUMBER }}
|
||||
|
||||
- name: Update environment due to outdated or unavailable cache
|
||||
run: mamba env update -n pypsa-eur -f envs/environment.yaml
|
||||
if: steps.cache.outputs.cache-hit != 'true'
|
||||
|
||||
- name: Test snakemake workflow
|
||||
run: |
|
||||
conda activate pypsa-eur
|
||||
conda list
|
||||
cp test/config.test1.yaml config.yaml
|
||||
snakemake --cores all solve_all_networks
|
||||
rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
|
1
.gitignore
vendored
1
.gitignore
vendored
@ -19,6 +19,7 @@ gurobi.log
|
||||
/data
|
||||
/data/links_p_nom.csv
|
||||
/cutouts
|
||||
/dask-worker-space
|
||||
|
||||
doc/_build
|
||||
|
||||
|
@ -4,5 +4,8 @@
|
||||
|
||||
version: 2
|
||||
|
||||
conda:
|
||||
environment: envs/environment.docs.yaml
|
||||
python:
|
||||
version: 3.8
|
||||
install:
|
||||
- requirements: doc/requirements.txt
|
||||
system_packages: true
|
||||
|
19
.syncignore-receive
Normal file
19
.syncignore-receive
Normal file
@ -0,0 +1,19 @@
|
||||
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
.snakemake
|
||||
.git
|
||||
.pytest_cache
|
||||
.ipynb_checkpoints
|
||||
.vscode
|
||||
.DS_Store
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.ipynb
|
||||
notebooks
|
||||
doc
|
||||
cutouts
|
||||
data/bundle
|
||||
*.nc
|
23
.syncignore-send
Normal file
23
.syncignore-send
Normal file
@ -0,0 +1,23 @@
|
||||
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
.snakemake
|
||||
.git
|
||||
.pytest_cache
|
||||
.ipynb_checkpoints
|
||||
.vscode
|
||||
.DS_Store
|
||||
__pycache__
|
||||
*.pyc
|
||||
*.pyo
|
||||
*.ipynb
|
||||
notebooks
|
||||
benchmarks
|
||||
logs
|
||||
resources*
|
||||
results
|
||||
networks*
|
||||
cutouts
|
||||
data/bundle
|
||||
doc
|
39
.travis.yml
39
.travis.yml
@ -1,39 +0,0 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
|
||||
os:
|
||||
- windows
|
||||
- linux
|
||||
- osx
|
||||
|
||||
language: bash
|
||||
|
||||
before_install:
|
||||
# install conda
|
||||
- wget https://raw.githubusercontent.com/trichter/conda4travis/latest/conda4travis.sh -O conda4travis.sh
|
||||
- source conda4travis.sh
|
||||
|
||||
# install conda environment
|
||||
- conda install -c conda-forge mamba
|
||||
- mamba env create -f ./envs/environment.yaml
|
||||
- conda activate pypsa-eur
|
||||
|
||||
# install open-source solver
|
||||
- mamba install -c conda-forge glpk ipopt'<3.13.3'
|
||||
|
||||
# list packages for easier debugging
|
||||
- conda list
|
||||
|
||||
before_script:
|
||||
- 'echo -ne "url: ${CDSAPI_URL}\nkey: ${CDSAPI_TOKEN}\n" > ~/.cdsapirc'
|
||||
|
||||
script:
|
||||
- cp ./test/config.test1.yaml ./config.yaml
|
||||
- snakemake -j all solve_all_networks
|
||||
- rm -rf resources/*.nc resources/*.geojson resources/*.h5 networks results
|
||||
# could repeat for more configurations in future
|
34
CITATION.cff
Normal file
34
CITATION.cff
Normal file
@ -0,0 +1,34 @@
|
||||
# SPDX-FileCopyrightText: : 2021 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
cff-version: 1.1.0
|
||||
message: "If you use this package, please cite the corresponding manuscript in Energy Strategy Reviews."
|
||||
title: "PyPSA-Eur: An open optimisation model of the European transmission system"
|
||||
repository: https://github.com/pypsa/pypsa-eur
|
||||
version: 0.4.0
|
||||
license: MIT
|
||||
journal: Energy Strategy Reviews
|
||||
doi: 10.1016/j.esr.2018.08.012
|
||||
authors:
|
||||
- family-names: Hörsch
|
||||
given-names: Jonas
|
||||
orcid: https://orcid.org/0000-0001-9438-767X
|
||||
- family-names: Brown
|
||||
given-names: Tom
|
||||
orcid: https://orcid.org/0000-0001-5898-1911
|
||||
- family-names: Hofmann
|
||||
given-names: Fabian
|
||||
orcid: https://orcid.org/0000-0002-6604-5450
|
||||
- family-names: Neumann
|
||||
given-names: Fabian
|
||||
orcid: https://orcid.org/0000-0001-8551-1480
|
||||
- family-names: Frysztacki
|
||||
given-names: Martha
|
||||
orcid: https://orcid.org/0000-0002-0788-1328
|
||||
- family-names: Hampp
|
||||
given-names: Johannes
|
||||
orcid: https://orcid.org/0000-0002-1776-116X
|
||||
- family-names: Schlachtberger
|
||||
given-names: David
|
||||
orcid: https://orcid.org/0000-0002-8167-8213
|
@ -1,625 +0,0 @@
|
||||
GNU GENERAL PUBLIC LICENSE
|
||||
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright © 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
|
||||
Everyone is permitted to copy and distribute verbatim copies of this license
|
||||
document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for software and
|
||||
other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed to take
|
||||
away your freedom to share and change the works. By contrast, the GNU General
|
||||
Public License is intended to guarantee your freedom to share and change all
|
||||
versions of a program--to make sure it remains free software for all its users.
|
||||
We, the Free Software Foundation, use the GNU General Public License for most
|
||||
of our software; it applies also to any other work released this way by its
|
||||
authors. You can apply it to your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not price. Our
|
||||
General Public Licenses are designed to make sure that you have the freedom
|
||||
to distribute copies of free software (and charge for them if you wish), that
|
||||
you receive source code or can get it if you want it, that you can change
|
||||
the software or use pieces of it in new free programs, and that you know you
|
||||
can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you these rights
|
||||
or asking you to surrender the rights. Therefore, you have certain responsibilities
|
||||
if you distribute copies of the software, or if you modify it: responsibilities
|
||||
to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether gratis or
|
||||
for a fee, you must pass on to the recipients the same freedoms that you received.
|
||||
You must make sure that they, too, receive or can get the source code. And
|
||||
you must show them these terms so they know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps: (1) assert
|
||||
copyright on the software, and (2) offer you this License giving you legal
|
||||
permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains that
|
||||
there is no warranty for this free software. For both users' and authors'
|
||||
sake, the GPL requires that modified versions be marked as changed, so that
|
||||
their problems will not be attributed erroneously to authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run modified
|
||||
versions of the software inside them, although the manufacturer can do so.
|
||||
This is fundamentally incompatible with the aim of protecting users' freedom
|
||||
to change the software. The systematic pattern of such abuse occurs in the
|
||||
area of products for individuals to use, which is precisely where it is most
|
||||
unacceptable. Therefore, we have designed this version of the GPL to prohibit
|
||||
the practice for those products. If such problems arise substantially in other
|
||||
domains, we stand ready to extend this provision to those domains in future
|
||||
versions of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents. States
|
||||
should not allow patents to restrict development and use of software on general-purpose
|
||||
computers, but in those that do, we wish to avoid the special danger that
|
||||
patents applied to a free program could make it effectively proprietary. To
|
||||
prevent this, the GPL assures that patents cannot be used to render the program
|
||||
non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and modification
|
||||
follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of works,
|
||||
such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this License.
|
||||
Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals
|
||||
or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work in
|
||||
a fashion requiring copyright permission, other than the making of an exact
|
||||
copy. The resulting work is called a "modified version" of the earlier work
|
||||
or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based on the
|
||||
Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without permission,
|
||||
would make you directly or secondarily liable for infringement under applicable
|
||||
copyright law, except executing it on a computer or modifying a private copy.
|
||||
Propagation includes copying, distribution (with or without modification),
|
||||
making available to the public, and in some countries other activities as
|
||||
well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other parties
|
||||
to make or receive copies. Mere interaction with a user through a computer
|
||||
network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices" to the
|
||||
extent that it includes a convenient and prominently visible feature that
|
||||
(1) displays an appropriate copyright notice, and (2) tells the user that
|
||||
there is no warranty for the work (except to the extent that warranties are
|
||||
provided), that licensees may convey the work under this License, and how
|
||||
to view a copy of this License. If the interface presents a list of user commands
|
||||
or options, such as a menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work for making
|
||||
modifications to it. "Object code" means any non-source form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official standard
|
||||
defined by a recognized standards body, or, in the case of interfaces specified
|
||||
for a particular programming language, one that is widely used among developers
|
||||
working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other than
|
||||
the work as a whole, that (a) is included in the normal form of packaging
|
||||
a Major Component, but which is not part of that Major Component, and (b)
|
||||
serves only to enable use of the work with that Major Component, or to implement
|
||||
a Standard Interface for which an implementation is available to the public
|
||||
in source code form. A "Major Component", in this context, means a major essential
|
||||
component (kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to produce
|
||||
the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all the source
|
||||
code needed to generate, install, and (for an executable work) run the object
|
||||
code and to modify the work, including scripts to control those activities.
|
||||
However, it does not include the work's System Libraries, or general-purpose
|
||||
tools or generally available free programs which are used unmodified in performing
|
||||
those activities but which are not part of the work. For example, Corresponding
|
||||
Source includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically linked
|
||||
subprograms that the work is specifically designed to require, such as by
|
||||
intimate data communication or control flow between those subprograms and
|
||||
other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users can regenerate
|
||||
automatically from other parts of the Corresponding Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of copyright
|
||||
on the Program, and are irrevocable provided the stated conditions are met.
|
||||
This License explicitly affirms your unlimited permission to run the unmodified
|
||||
Program. The output from running a covered work is covered by this License
|
||||
only if the output, given its content, constitutes a covered work. This License
|
||||
acknowledges your rights of fair use or other equivalent, as provided by copyright
|
||||
law.
|
||||
|
||||
You may make, run and propagate covered works that you do not convey, without
|
||||
conditions so long as your license otherwise remains in force. You may convey
|
||||
covered works to others for the sole purpose of having them make modifications
|
||||
exclusively for you, or provide you with facilities for running those works,
|
||||
provided that you comply with the terms of this License in conveying all material
|
||||
for which you do not control copyright. Those thus making or running the covered
|
||||
works for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of your copyrighted
|
||||
material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under the conditions
|
||||
stated below. Sublicensing is not allowed; section 10 makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological measure
|
||||
under any applicable law fulfilling obligations under article 11 of the WIPO
|
||||
copyright treaty adopted on 20 December 1996, or similar laws prohibiting
|
||||
or restricting circumvention of such measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid circumvention
|
||||
of technological measures to the extent such circumvention is effected by
|
||||
exercising rights under this License with respect to the covered work, and
|
||||
you disclaim any intention to limit operation or modification of the work
|
||||
as a means of enforcing, against the work's users, your or third parties'
|
||||
legal rights to forbid circumvention of technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you receive
|
||||
it, in any medium, provided that you conspicuously and appropriately publish
|
||||
on each copy an appropriate copyright notice; keep intact all notices stating
|
||||
that this License and any non-permissive terms added in accord with section
|
||||
7 apply to the code; keep intact all notices of the absence of any warranty;
|
||||
and give all recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey, and you
|
||||
may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to produce
|
||||
it from the Program, in the form of source code under the terms of section
|
||||
4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified it, and
|
||||
giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is released under
|
||||
this License and any conditions added under section 7. This requirement modifies
|
||||
the requirement in section 4 to "keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this License to anyone
|
||||
who comes into possession of a copy. This License will therefore apply, along
|
||||
with any applicable section 7 additional terms, to the whole of the work,
|
||||
and all its parts, regardless of how they are packaged. This License gives
|
||||
no permission to license the work in any other way, but it does not invalidate
|
||||
such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display Appropriate
|
||||
Legal Notices; however, if the Program has interactive interfaces that do
|
||||
not display Appropriate Legal Notices, your work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent works,
|
||||
which are not by their nature extensions of the covered work, and which are
|
||||
not combined with it such as to form a larger program, in or on a volume of
|
||||
a storage or distribution medium, is called an "aggregate" if the compilation
|
||||
and its resulting copyright are not used to limit the access or legal rights
|
||||
of the compilation's users beyond what the individual works permit. Inclusion
|
||||
of a covered work in an aggregate does not cause this License to apply to
|
||||
the other parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms of sections
|
||||
4 and 5, provided that you also convey the machine-readable Corresponding
|
||||
Source under the terms of this License, in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product (including
|
||||
a physical distribution medium), accompanied by the Corresponding Source fixed
|
||||
on a durable physical medium customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product (including
|
||||
a physical distribution medium), accompanied by a written offer, valid for
|
||||
at least three years and valid for as long as you offer spare parts or customer
|
||||
support for that product model, to give anyone who possesses the object code
|
||||
either (1) a copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical medium customarily
|
||||
used for software interchange, for a price no more than your reasonable cost
|
||||
of physically performing this conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the written
|
||||
offer to provide the Corresponding Source. This alternative is allowed only
|
||||
occasionally and noncommercially, and only if you received the object code
|
||||
with such an offer, in accord with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated place (gratis
|
||||
or for a charge), and offer equivalent access to the Corresponding Source
|
||||
in the same way through the same place at no further charge. You need not
|
||||
require recipients to copy the Corresponding Source along with the object
|
||||
code. If the place to copy the object code is a network server, the Corresponding
|
||||
Source may be on a different server (operated by you or a third party) that
|
||||
supports equivalent copying facilities, provided you maintain clear directions
|
||||
next to the object code saying where to find the Corresponding Source. Regardless
|
||||
of what server hosts the Corresponding Source, you remain obligated to ensure
|
||||
that it is available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided you inform
|
||||
other peers where the object code and Corresponding Source of the work are
|
||||
being offered to the general public at no charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded from
|
||||
the Corresponding Source as a System Library, need not be included in conveying
|
||||
the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any tangible
|
||||
personal property which is normally used for personal, family, or household
|
||||
purposes, or (2) anything designed or sold for incorporation into a dwelling.
|
||||
In determining whether a product is a consumer product, doubtful cases shall
|
||||
be resolved in favor of coverage. For a particular product received by a particular
|
||||
user, "normally used" refers to a typical or common use of that class of product,
|
||||
regardless of the status of the particular user or of the way in which the
|
||||
particular user actually uses, or expects or is expected to use, the product.
|
||||
A product is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent the
|
||||
only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods, procedures,
|
||||
authorization keys, or other information required to install and execute modified
|
||||
versions of a covered work in that User Product from a modified version of
|
||||
its Corresponding Source. The information must suffice to ensure that the
|
||||
continued functioning of the modified object code is in no case prevented
|
||||
or interfered with solely because modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or specifically
|
||||
for use in, a User Product, and the conveying occurs as part of a transaction
|
||||
in which the right of possession and use of the User Product is transferred
|
||||
to the recipient in perpetuity or for a fixed term (regardless of how the
|
||||
transaction is characterized), the Corresponding Source conveyed under this
|
||||
section must be accompanied by the Installation Information. But this requirement
|
||||
does not apply if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has been installed
|
||||
in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a requirement
|
||||
to continue to provide support service, warranty, or updates for a work that
|
||||
has been modified or installed by the recipient, or for the User Product in
|
||||
which it has been modified or installed. Access to a network may be denied
|
||||
when the modification itself materially and adversely affects the operation
|
||||
of the network or violates the rules and protocols for communication across
|
||||
the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided, in accord
|
||||
with this section must be in a format that is publicly documented (and with
|
||||
an implementation available to the public in source code form), and must require
|
||||
no special password or key for unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this License
|
||||
by making exceptions from one or more of its conditions. Additional permissions
|
||||
that are applicable to the entire Program shall be treated as though they
|
||||
were included in this License, to the extent that they are valid under applicable
|
||||
law. If additional permissions apply only to part of the Program, that part
|
||||
may be used separately under those permissions, but the entire Program remains
|
||||
governed by this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option remove any
|
||||
additional permissions from that copy, or from any part of it. (Additional
|
||||
permissions may be written to require their own removal in certain cases when
|
||||
you modify the work.) You may place additional permissions on material, added
|
||||
by you to a covered work, for which you have or can give appropriate copyright
|
||||
permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you add
|
||||
to a covered work, you may (if authorized by the copyright holders of that
|
||||
material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the terms of
|
||||
sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or author
|
||||
attributions in that material or in the Appropriate Legal Notices displayed
|
||||
by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or requiring
|
||||
that modified versions of such material be marked in reasonable ways as different
|
||||
from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or authors
|
||||
of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some trade names,
|
||||
trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that material by
|
||||
anyone who conveys the material (or modified versions of it) with contractual
|
||||
assumptions of liability to the recipient, for any liability that these contractual
|
||||
assumptions directly impose on those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further restrictions"
|
||||
within the meaning of section 10. If the Program as you received it, or any
|
||||
part of it, contains a notice stating that it is governed by this License
|
||||
along with a term that is a further restriction, you may remove that term.
|
||||
If a license document contains a further restriction but permits relicensing
|
||||
or conveying under this License, you may add to a covered work material governed
|
||||
by the terms of that license document, provided that the further restriction
|
||||
does not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you must place,
|
||||
in the relevant source files, a statement of the additional terms that apply
|
||||
to those files, or a notice indicating where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the form
|
||||
of a separately written license, or stated as exceptions; the above requirements
|
||||
apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly provided
|
||||
under this License. Any attempt otherwise to propagate or modify it is void,
|
||||
and will automatically terminate your rights under this License (including
|
||||
any patent licenses granted under the third paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your license from
|
||||
a particular copyright holder is reinstated (a) provisionally, unless and
|
||||
until the copyright holder explicitly and finally terminates your license,
|
||||
and (b) permanently, if the copyright holder fails to notify you of the violation
|
||||
by some reasonable means prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is reinstated permanently
|
||||
if the copyright holder notifies you of the violation by some reasonable means,
|
||||
this is the first time you have received notice of violation of this License
|
||||
(for any work) from that copyright holder, and you cure the violation prior
|
||||
to 30 days after your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the licenses
|
||||
of parties who have received copies or rights from you under this License.
|
||||
If your rights have been terminated and not permanently reinstated, you do
|
||||
not qualify to receive new licenses for the same material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or run a copy
|
||||
of the Program. Ancillary propagation of a covered work occurring solely as
|
||||
a consequence of using peer-to-peer transmission to receive a copy likewise
|
||||
does not require acceptance. However, nothing other than this License grants
|
||||
you permission to propagate or modify any covered work. These actions infringe
|
||||
copyright if you do not accept this License. Therefore, by modifying or propagating
|
||||
a covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically receives
|
||||
a license from the original licensors, to run, modify and propagate that work,
|
||||
subject to this License. You are not responsible for enforcing compliance
|
||||
by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an organization,
|
||||
or substantially all assets of one, or subdividing an organization, or merging
|
||||
organizations. If propagation of a covered work results from an entity transaction,
|
||||
each party to that transaction who receives a copy of the work also receives
|
||||
whatever licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the Corresponding
|
||||
Source of the work from the predecessor in interest, if the predecessor has
|
||||
it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the rights
|
||||
granted or affirmed under this License. For example, you may not impose a
|
||||
license fee, royalty, or other charge for exercise of rights granted under
|
||||
this License, and you may not initiate litigation (including a cross-claim
|
||||
or counterclaim in a lawsuit) alleging that any patent claim is infringed
|
||||
by making, using, selling, offering for sale, or importing the Program or
|
||||
any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this License
|
||||
of the Program or a work on which the Program is based. The work thus licensed
|
||||
is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims owned or controlled
|
||||
by the contributor, whether already acquired or hereafter acquired, that would
|
||||
be infringed by some manner, permitted by this License, of making, using,
|
||||
or selling its contributor version, but do not include claims that would be
|
||||
infringed only as a consequence of further modification of the contributor
|
||||
version. For purposes of this definition, "control" includes the right to
|
||||
grant patent sublicenses in a manner consistent with the requirements of this
|
||||
License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free patent
|
||||
license under the contributor's essential patent claims, to make, use, sell,
|
||||
offer for sale, import and otherwise run, modify and propagate the contents
|
||||
of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express agreement
|
||||
or commitment, however denominated, not to enforce a patent (such as an express
|
||||
permission to practice a patent or covenant not to sue for patent infringement).
|
||||
To "grant" such a patent license to a party means to make such an agreement
|
||||
or commitment not to enforce a patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license, and the
|
||||
Corresponding Source of the work is not available for anyone to copy, free
|
||||
of charge and under the terms of this License, through a publicly available
|
||||
network server or other readily accessible means, then you must either (1)
|
||||
cause the Corresponding Source to be so available, or (2) arrange to deprive
|
||||
yourself of the benefit of the patent license for this particular work, or
|
||||
(3) arrange, in a manner consistent with the requirements of this License,
|
||||
to extend the patent license to downstream recipients. "Knowingly relying"
|
||||
means you have actual knowledge that, but for the patent license, your conveying
|
||||
the covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that country
|
||||
that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or arrangement,
|
||||
you convey, or propagate by procuring conveyance of, a covered work, and grant
|
||||
a patent license to some of the parties receiving the covered work authorizing
|
||||
them to use, propagate, modify or convey a specific copy of the covered work,
|
||||
then the patent license you grant is automatically extended to all recipients
|
||||
of the covered work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within the scope
|
||||
of its coverage, prohibits the exercise of, or is conditioned on the non-exercise
|
||||
of one or more of the rights that are specifically granted under this License.
|
||||
You may not convey a covered work if you are a party to an arrangement with
|
||||
a third party that is in the business of distributing software, under which
|
||||
you make payment to the third party based on the extent of your activity of
|
||||
conveying the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory patent
|
||||
license (a) in connection with copies of the covered work conveyed by you
|
||||
(or copies made from those copies), or (b) primarily for and in connection
|
||||
with specific products or compilations that contain the covered work, unless
|
||||
you entered into that arrangement, or that patent license was granted, prior
|
||||
to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting any implied
|
||||
license or other defenses to infringement that may otherwise be available
|
||||
to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or otherwise)
|
||||
that contradict the conditions of this License, they do not excuse you from
|
||||
the conditions of this License. If you cannot convey a covered work so as
|
||||
to satisfy simultaneously your obligations under this License and any other
|
||||
pertinent obligations, then as a consequence you may not convey it at all.
|
||||
For example, if you agree to terms that obligate you to collect a royalty
|
||||
for further conveying from those to whom you convey the Program, the only
|
||||
way you could satisfy both those terms and this License would be to refrain
|
||||
entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have permission to
|
||||
link or combine any covered work with a work licensed under version 3 of the
|
||||
GNU Affero General Public License into a single combined work, and to convey
|
||||
the resulting work. The terms of this License will continue to apply to the
|
||||
part which is the covered work, but the special requirements of the GNU Affero
|
||||
General Public License, section 13, concerning interaction through a network
|
||||
will apply to the combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of the
|
||||
GNU General Public License from time to time. Such new versions will be similar
|
||||
in spirit to the present version, but may differ in detail to address new
|
||||
problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Program specifies
|
||||
that a certain numbered version of the GNU General Public License "or any
|
||||
later version" applies to it, you have the option of following the terms and
|
||||
conditions either of that numbered version or of any later version published
|
||||
by the Free Software Foundation. If the Program does not specify a version
|
||||
number of the GNU General Public License, you may choose any version ever
|
||||
published by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future versions of
|
||||
the GNU General Public License can be used, that proxy's public statement
|
||||
of acceptance of a version permanently authorizes you to choose that version
|
||||
for the Program.
|
||||
|
||||
Later license versions may give you additional or different permissions. However,
|
||||
no additional obligations are imposed on any author or copyright holder as
|
||||
a result of your choosing to follow a later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE
|
||||
LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
|
||||
OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER
|
||||
EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
|
||||
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
|
||||
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM
|
||||
PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
|
||||
CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL
|
||||
ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM
|
||||
AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL,
|
||||
INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO
|
||||
USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED
|
||||
INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE
|
||||
PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
|
||||
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided above cannot
|
||||
be given local legal effect according to their terms, reviewing courts shall
|
||||
apply local law that most closely approximates an absolute waiver of all civil
|
||||
liability in connection with the Program, unless a warranty or assumption
|
||||
of liability accompanies a copy of the Program in return for a fee. END OF
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest possible
|
||||
use to the public, the best way to achieve this is to make it free software
|
||||
which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest to attach
|
||||
them to the start of each source file to most effectively state the exclusion
|
||||
of warranty; and each file should have at least the "copyright" line and a
|
||||
pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify it under
|
||||
the terms of the GNU General Public License as published by the Free Software
|
||||
Foundation, either version 3 of the License, or (at your option) any later
|
||||
version.
|
||||
|
||||
This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
|
||||
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License along with
|
||||
this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short notice like
|
||||
this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
|
||||
This is free software, and you are welcome to redistribute it under certain
|
||||
conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands might
|
||||
be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary. For
|
||||
more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you may
|
||||
consider it more useful to permit linking proprietary applications with the
|
||||
library. If this is what you want to do, use the GNU Lesser General Public
|
||||
License instead of this License. But first, please read <https://www.gnu.org/
|
||||
licenses /why-not-lgpl.html>.
|
20
LICENSES/MIT.txt
Normal file
20
LICENSES/MIT.txt
Normal file
@ -0,0 +1,20 @@
|
||||
MIT License
|
||||
|
||||
Copyright 2017-2021 The PyPSA-Eur Authors
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
@ -4,7 +4,7 @@ SPDX-License-Identifier: CC-BY-4.0
|
||||
-->
|
||||
|
||||
![GitHub release (latest by date including pre-releases)](https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases)
|
||||
[![Build Status](https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master)](https://travis-ci.org/PyPSA/pypsa-eur)
|
||||
[![Build Status](https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg)](https://github.com/PyPSA/pypsa-eur/actions)
|
||||
[![Documentation](https://readthedocs.org/projects/pypsa-eur/badge/?version=latest)](https://pypsa-eur.readthedocs.io/en/latest/?badge=latest)
|
||||
![Size](https://img.shields.io/github/repo-size/pypsa/pypsa-eur)
|
||||
[![Zenodo](https://zenodo.org/badge/DOI/10.5281/zenodo.3520874.svg)](https://doi.org/10.5281/zenodo.3520874)
|
||||
@ -50,15 +50,15 @@ The dataset consists of:
|
||||
|
||||
- A grid model based on a modified [GridKit](https://github.com/bdw/GridKit)
|
||||
extraction of the [ENTSO-E Transmission System
|
||||
Map](https://www.entsoe.eu/data/map/). The grid model contains 6001 lines
|
||||
Map](https://www.entsoe.eu/data/map/). The grid model contains 6763 lines
|
||||
(alternating current lines at and above 220kV voltage level and all high
|
||||
voltage direct current lines) and 3657 substations.
|
||||
voltage direct current lines) and 3642 substations.
|
||||
- The open power plant database
|
||||
[powerplantmatching](https://github.com/FRESNA/powerplantmatching).
|
||||
- Electrical demand time series from the
|
||||
[OPSD project](https://open-power-system-data.org/).
|
||||
- Renewable time series based on ERA5 and SARAH, assembled using the [atlite tool](https://github.com/FRESNA/atlite).
|
||||
- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [vresutils library](https://github.com/FRESNA/vresutils) and the [glaes library](https://github.com/FZJ-IEK3-VSA/glaes).
|
||||
- Geographical potentials for wind and solar generators based on land use (CORINE) and excluding nature reserves (Natura2000) are computed with the [atlite library](https://github.com/PyPSA/atlite).
|
||||
|
||||
Already-built versions of the model can be found in the accompanying [Zenodo
|
||||
repository](https://doi.org/10.5281/zenodo.3601881).
|
||||
|
100
Snakefile
100
Snakefile
@ -1,9 +1,9 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
from os.path import normpath, exists
|
||||
from shutil import copyfile
|
||||
from shutil import copyfile, move
|
||||
|
||||
from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
|
||||
HTTP = HTTPRemoteProvider()
|
||||
@ -44,7 +44,7 @@ if config['enable'].get('prepare_links_p_nom', False):
|
||||
output: 'data/links_p_nom.csv'
|
||||
log: 'logs/prepare_links_p_nom.log'
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
resources: mem_mb=500
|
||||
script: 'scripts/prepare_links_p_nom.py'
|
||||
|
||||
|
||||
@ -66,7 +66,20 @@ if config['enable'].get('retrieve_databundle', True):
|
||||
script: 'scripts/retrieve_databundle.py'
|
||||
|
||||
|
||||
rule retrieve_natura_data:
|
||||
input: HTTP.remote("sdi.eea.europa.eu/datashare/s/H6QGCybMdLLnywo/download", additional_request_string="?path=%2FNatura2000_end2020_gpkg&files=Natura2000_end2020.gpkg", static=True)
|
||||
output: "data/Natura2000_end2020.gpkg"
|
||||
run: move(input[0], output[0])
|
||||
|
||||
|
||||
rule retrieve_load_data:
|
||||
input: HTTP.remote("data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv", keep_local=True, static=True)
|
||||
output: "data/load_raw.csv"
|
||||
run: move(input[0], output[0])
|
||||
|
||||
|
||||
rule build_load_data:
|
||||
input: "data/load_raw.csv"
|
||||
output: "resources/load.csv"
|
||||
log: "logs/build_load_data.log"
|
||||
script: 'scripts/build_load_data.py'
|
||||
@ -78,7 +91,7 @@ rule build_powerplants:
|
||||
output: "resources/powerplants.csv"
|
||||
log: "logs/build_powerplants.log"
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
resources: mem_mb=500
|
||||
script: "scripts/build_powerplants.py"
|
||||
|
||||
|
||||
@ -99,7 +112,7 @@ rule base_network:
|
||||
log: "logs/base_network.log"
|
||||
benchmark: "benchmarks/base_network"
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
resources: mem_mb=500
|
||||
script: "scripts/base_network.py"
|
||||
|
||||
|
||||
@ -119,7 +132,7 @@ rule build_shapes:
|
||||
nuts3_shapes='resources/nuts3_shapes.geojson'
|
||||
log: "logs/build_shapes.log"
|
||||
threads: 1
|
||||
resources: mem=500
|
||||
resources: mem_mb=500
|
||||
script: "scripts/build_shapes.py"
|
||||
|
||||
|
||||
@ -133,7 +146,7 @@ rule build_bus_regions:
|
||||
regions_offshore="resources/regions_offshore.geojson"
|
||||
log: "logs/build_bus_regions.log"
|
||||
threads: 1
|
||||
resources: mem=1000
|
||||
resources: mem_mb=1000
|
||||
script: "scripts/build_bus_regions.py"
|
||||
|
||||
if config['enable'].get('build_cutout', False):
|
||||
@ -145,32 +158,15 @@ if config['enable'].get('build_cutout', False):
|
||||
log: "logs/build_cutout/{cutout}.log"
|
||||
benchmark: "benchmarks/build_cutout_{cutout}"
|
||||
threads: ATLITE_NPROCESSES
|
||||
resources: mem=ATLITE_NPROCESSES * 1000
|
||||
resources: mem_mb=ATLITE_NPROCESSES * 1000
|
||||
script: "scripts/build_cutout.py"
|
||||
|
||||
|
||||
if config['enable'].get('retrieve_cutout', True):
|
||||
rule retrieve_cutout:
|
||||
input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True)
|
||||
input: HTTP.remote("zenodo.org/record/6382570/files/{cutout}.nc", keep_local=True, static=True)
|
||||
output: "cutouts/{cutout}.nc"
|
||||
shell: "mv {input} {output}"
|
||||
|
||||
|
||||
if config['enable'].get('build_natura_raster', False):
|
||||
rule build_natura_raster:
|
||||
input:
|
||||
natura="data/bundle/natura/Natura2000_end2015.shp",
|
||||
cutouts=expand("cutouts/{cutouts}.nc", **config['atlite'])
|
||||
output: "resources/natura.tiff"
|
||||
log: "logs/build_natura_raster.log"
|
||||
script: "scripts/build_natura_raster.py"
|
||||
|
||||
|
||||
if config['enable'].get('retrieve_natura_raster', True):
|
||||
rule retrieve_natura_raster:
|
||||
input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True)
|
||||
output: "resources/natura.tiff"
|
||||
shell: "mv {input} {output}"
|
||||
run: move(input[0], output[0])
|
||||
|
||||
if config['enable'].get('retrieve_cost_data', True):
|
||||
rule retrieve_cost_data:
|
||||
@ -185,7 +181,9 @@ rule build_renewable_profiles:
|
||||
input:
|
||||
base_network="networks/base.nc",
|
||||
corine="data/bundle/corine/g250_clc06_V18_5.tif",
|
||||
natura="resources/natura.tiff",
|
||||
natura=lambda w: ("data/Natura2000_end2020.gpkg"
|
||||
if config["renewable"][w.technology]["natura"]
|
||||
else []),
|
||||
gebco=lambda w: ("data/bundle/GEBCO_2014_2D.nc"
|
||||
if "max_depth" in config["renewable"][w.technology].keys()
|
||||
else []),
|
||||
@ -199,20 +197,20 @@ rule build_renewable_profiles:
|
||||
log: "logs/build_renewable_profile_{technology}.log"
|
||||
benchmark: "benchmarks/build_renewable_profiles_{technology}"
|
||||
threads: ATLITE_NPROCESSES
|
||||
resources: mem=ATLITE_NPROCESSES * 5000
|
||||
resources: mem_mb=ATLITE_NPROCESSES * 5000
|
||||
wildcard_constraints: technology="(?!hydro).*" # Any technology other than hydro
|
||||
script: "scripts/build_renewable_profiles.py"
|
||||
|
||||
|
||||
if 'hydro' in config['renewable'].keys():
|
||||
rule build_hydro_profile:
|
||||
input:
|
||||
country_shapes='resources/country_shapes.geojson',
|
||||
eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
|
||||
cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + ".nc"
|
||||
output: 'resources/profile_hydro.nc'
|
||||
log: "logs/build_hydro_profile.log"
|
||||
resources: mem=5000
|
||||
script: 'scripts/build_hydro_profile.py'
|
||||
rule build_hydro_profile:
|
||||
input:
|
||||
country_shapes='resources/country_shapes.geojson',
|
||||
eia_hydro_generation='data/bundle/EIA_hydro_generation_2000_2014.csv',
|
||||
cutout=f"cutouts/{config['renewable']['hydro']['cutout']}.nc" if "hydro" in config["renewable"] else "config['renewable']['hydro']['cutout'] not configured",
|
||||
output: 'resources/profile_hydro.nc'
|
||||
log: "logs/build_hydro_profile.log"
|
||||
resources: mem_mb=5000
|
||||
script: 'scripts/build_hydro_profile.py'
|
||||
|
||||
|
||||
rule add_electricity:
|
||||
@ -231,7 +229,7 @@ rule add_electricity:
|
||||
log: "logs/add_electricity.log"
|
||||
benchmark: "benchmarks/add_electricity"
|
||||
threads: 1
|
||||
resources: mem=3000
|
||||
resources: mem_mb=5000
|
||||
script: "scripts/add_electricity.py"
|
||||
|
||||
|
||||
@ -245,11 +243,12 @@ rule simplify_network:
|
||||
network='networks/elec_s{simpl}.nc',
|
||||
regions_onshore="resources/regions_onshore_elec_s{simpl}.geojson",
|
||||
regions_offshore="resources/regions_offshore_elec_s{simpl}.geojson",
|
||||
busmap='resources/busmap_elec_s{simpl}.csv'
|
||||
busmap='resources/busmap_elec_s{simpl}.csv',
|
||||
connection_costs='resources/connection_costs_s{simpl}.csv'
|
||||
log: "logs/simplify_network/elec_s{simpl}.log"
|
||||
benchmark: "benchmarks/simplify_network/elec_s{simpl}"
|
||||
threads: 1
|
||||
resources: mem=4000
|
||||
resources: mem_mb=4000
|
||||
script: "scripts/simplify_network.py"
|
||||
|
||||
|
||||
@ -271,7 +270,7 @@ rule cluster_network:
|
||||
log: "logs/cluster_network/elec_s{simpl}_{clusters}.log"
|
||||
benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}"
|
||||
threads: 1
|
||||
resources: mem=3000
|
||||
resources: mem_mb=6000
|
||||
script: "scripts/cluster_network.py"
|
||||
|
||||
|
||||
@ -283,7 +282,7 @@ rule add_extra_components:
|
||||
log: "logs/add_extra_components/elec_s{simpl}_{clusters}.log"
|
||||
benchmark: "benchmarks/add_extra_components/elec_s{simpl}_{clusters}_ec"
|
||||
threads: 1
|
||||
resources: mem=3000
|
||||
resources: mem_mb=3000
|
||||
script: "scripts/add_extra_components.py"
|
||||
|
||||
|
||||
@ -293,7 +292,7 @@ rule prepare_network:
|
||||
log: "logs/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
|
||||
benchmark: "benchmarks/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
threads: 1
|
||||
resources: mem=4000
|
||||
resources: mem_mb=4000
|
||||
script: "scripts/prepare_network.py"
|
||||
|
||||
|
||||
@ -311,6 +310,8 @@ def memory(w):
|
||||
break
|
||||
if w.clusters.endswith('m'):
|
||||
return int(factor * (18000 + 180 * int(w.clusters[:-1])))
|
||||
elif w.clusters == "all":
|
||||
return int(factor * (18000 + 180 * 4000))
|
||||
else:
|
||||
return int(factor * (10000 + 195 * int(w.clusters)))
|
||||
|
||||
@ -324,8 +325,8 @@ rule solve_network:
|
||||
memory="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
|
||||
benchmark: "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
threads: 4
|
||||
resources: mem=memory
|
||||
shadow: "shallow"
|
||||
resources: mem_mb=memory
|
||||
shadow: "minimal"
|
||||
script: "scripts/solve_network.py"
|
||||
|
||||
|
||||
@ -340,8 +341,8 @@ rule solve_operations_network:
|
||||
memory="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
|
||||
benchmark: "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
|
||||
threads: 4
|
||||
resources: mem=(lambda w: 5000 + 372 * int(w.clusters))
|
||||
shadow: "shallow"
|
||||
resources: mem_mb=(lambda w: 5000 + 372 * int(w.clusters))
|
||||
shadow: "minimal"
|
||||
script: "scripts/solve_operations_network.py"
|
||||
|
||||
|
||||
@ -366,7 +367,6 @@ def input_make_summary(w):
|
||||
ll = w.ll
|
||||
return ([COSTS] +
|
||||
expand("results/networks/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.nc",
|
||||
network=w.network,
|
||||
ll=ll,
|
||||
**{k: config["scenario"][k] if getattr(w, k) == "all" else getattr(w, k)
|
||||
for k in ["simpl", "clusters", "opts"]}))
|
||||
|
@ -2,7 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
version: 0.3.0
|
||||
version: 0.4.0
|
||||
tutorial: false
|
||||
|
||||
logging:
|
||||
@ -19,6 +19,10 @@ scenario:
|
||||
|
||||
countries: ['AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK']
|
||||
|
||||
clustering:
|
||||
simplify:
|
||||
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
|
||||
|
||||
snapshots:
|
||||
start: "2013-01-01"
|
||||
end: "2014-01-01"
|
||||
@ -30,14 +34,12 @@ enable:
|
||||
retrieve_cost_data: true
|
||||
build_cutout: false
|
||||
retrieve_cutout: true
|
||||
build_natura_raster: false
|
||||
retrieve_natura_raster: true
|
||||
custom_busmap: false
|
||||
|
||||
electricity:
|
||||
voltages: [220., 300., 380.]
|
||||
co2limit: 7.75e+7 # 0.05 * 3.1e9*0.5
|
||||
co2base: 1.487e9
|
||||
co2base: 1.487e+9
|
||||
agg_p_nom_limits: data/agg_p_nom_minmax.csv
|
||||
|
||||
extendable_carriers:
|
||||
@ -108,8 +110,11 @@ renewable:
|
||||
resource:
|
||||
method: wind
|
||||
turbine: NREL_ReferenceTurbine_5MW_offshore
|
||||
capacity_per_sqkm: 3
|
||||
# correction_factor: 0.93
|
||||
capacity_per_sqkm: 2
|
||||
correction_factor: 0.8855
|
||||
# proxy for wake losses
|
||||
# from 10.1016/j.energy.2018.08.153
|
||||
# until done more rigorously in #153
|
||||
corine: [44, 255]
|
||||
natura: true
|
||||
max_depth: 50
|
||||
@ -122,8 +127,11 @@ renewable:
|
||||
method: wind
|
||||
turbine: NREL_ReferenceTurbine_5MW_offshore
|
||||
# ScholzPhd Tab 4.3.1: 10MW/km^2
|
||||
capacity_per_sqkm: 3
|
||||
# correction_factor: 0.93
|
||||
capacity_per_sqkm: 2
|
||||
correction_factor: 0.8855
|
||||
# proxy for wake losses
|
||||
# from 10.1016/j.energy.2018.08.153
|
||||
# until done more rigorously in #153
|
||||
corine: [44, 255]
|
||||
natura: true
|
||||
max_depth: 50
|
||||
@ -139,12 +147,14 @@ renewable:
|
||||
slope: 35.
|
||||
azimuth: 180.
|
||||
capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2
|
||||
# Determined by comparing uncorrected area-weighted full-load hours to those
|
||||
# Correction factor determined by comparing uncorrected area-weighted full-load hours to those
|
||||
# published in Supplementary Data to
|
||||
# Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power
|
||||
# sector: The economic potential of photovoltaics and concentrating solar
|
||||
# power." Applied Energy 135 (2014): 704-720.
|
||||
correction_factor: 0.854337
|
||||
# This correction factor of 0.854337 may be in order if using reanalysis data.
|
||||
# for discussion refer to https://github.com/PyPSA/pypsa-eur/pull/304
|
||||
# correction_factor: 0.854337
|
||||
corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
|
||||
14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
|
||||
natura: true
|
||||
@ -179,8 +189,7 @@ transformers:
|
||||
type: ''
|
||||
|
||||
load:
|
||||
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
|
||||
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
|
||||
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
|
||||
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
|
||||
time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from
|
||||
manual_adjustments: true # false
|
||||
@ -238,8 +247,8 @@ solving:
|
||||
# threads: 4
|
||||
# lpmethod: 4 # barrier
|
||||
# solutiontype: 2 # non basic solution, ie no crossover
|
||||
# barrier_convergetol: 1.e-5
|
||||
# feasopt_tolerance: 1.e-6
|
||||
# barrier.convergetol: 1.e-5
|
||||
# feasopt.tolerance: 1.e-6
|
||||
|
||||
plotting:
|
||||
map:
|
||||
|
@ -2,7 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
version: 0.3.0
|
||||
version: 0.4.0
|
||||
tutorial: true
|
||||
|
||||
logging:
|
||||
@ -17,7 +17,11 @@ scenario:
|
||||
clusters: [5]
|
||||
opts: [Co2L-24H]
|
||||
|
||||
countries: ['DE']
|
||||
countries: ['BE']
|
||||
|
||||
clustering:
|
||||
simplify:
|
||||
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
|
||||
|
||||
snapshots:
|
||||
start: "2013-03-01"
|
||||
@ -30,8 +34,6 @@ enable:
|
||||
retrieve_cost_data: true
|
||||
build_cutout: false
|
||||
retrieve_cutout: true
|
||||
build_natura_raster: false
|
||||
retrieve_natura_raster: true
|
||||
custom_busmap: false
|
||||
|
||||
electricity:
|
||||
@ -55,7 +57,7 @@ electricity:
|
||||
atlite:
|
||||
nprocesses: 4
|
||||
cutouts:
|
||||
europe-2013-era5-tutorial:
|
||||
be-03-2013-era5:
|
||||
module: era5
|
||||
x: [4., 15.]
|
||||
y: [46., 56.]
|
||||
@ -63,7 +65,7 @@ atlite:
|
||||
|
||||
renewable:
|
||||
onwind:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: wind
|
||||
turbine: Vestas_V112_3MW
|
||||
@ -76,23 +78,23 @@ renewable:
|
||||
24, 25, 26, 27, 28, 29, 31, 32]
|
||||
distance: 1000
|
||||
distance_grid_codes: [1, 2, 3, 4, 5, 6]
|
||||
natura: true
|
||||
natura: false
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
offwind-ac:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: wind
|
||||
turbine: NREL_ReferenceTurbine_5MW_offshore
|
||||
capacity_per_sqkm: 3
|
||||
# correction_factor: 0.93
|
||||
corine: [44, 255]
|
||||
natura: true
|
||||
natura: false
|
||||
max_shore_distance: 30000
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
offwind-dc:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: wind
|
||||
turbine: NREL_ReferenceTurbine_5MW_offshore
|
||||
@ -100,12 +102,12 @@ renewable:
|
||||
capacity_per_sqkm: 3
|
||||
# correction_factor: 0.93
|
||||
corine: [44, 255]
|
||||
natura: true
|
||||
natura: false
|
||||
min_shore_distance: 30000
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
solar:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: pv
|
||||
panel: CSi
|
||||
@ -113,15 +115,16 @@ renewable:
|
||||
slope: 35.
|
||||
azimuth: 180.
|
||||
capacity_per_sqkm: 1.7 # ScholzPhd Tab 4.3.1: 170 MW/km^2
|
||||
# Determined by comparing uncorrected area-weighted full-load hours to those
|
||||
# Correction factor determined by comparing uncorrected area-weighted full-load hours to those
|
||||
# published in Supplementary Data to
|
||||
# Pietzcker, Robert Carl, et al. "Using the sun to decarbonize the power
|
||||
# sector: The economic potential of photovoltaics and concentrating solar
|
||||
# power." Applied Energy 135 (2014): 704-720.
|
||||
correction_factor: 0.854337
|
||||
# This correction factor of 0.854337 may be in order if using reanalysis data.
|
||||
# correction_factor: 0.854337
|
||||
corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
|
||||
14, 15, 16, 17, 18, 19, 20, 26, 31, 32]
|
||||
natura: true
|
||||
natura: false
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
|
||||
@ -147,7 +150,6 @@ transformers:
|
||||
type: ''
|
||||
|
||||
load:
|
||||
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
|
||||
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
|
||||
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
|
||||
time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from
|
||||
|
@ -1,6 +1,6 @@
|
||||
# Unofficial ENTSO-E dataset processed by GridKit
|
||||
|
||||
This dataset was generated based on a map extract from May 25, 2018.
|
||||
This dataset was generated based on a map extract from March 2022.
|
||||
This is an _unofficial_ extract of the
|
||||
[ENTSO-E interactive map](https://www.entsoe.eu/data/map/)
|
||||
of the European power system (including to a limited extent North
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,4 +1,7 @@
|
||||
converter_id,bus0,bus1
|
||||
6168,5442,5443
|
||||
7290,6388,6389
|
||||
2232,1837,1836
|
||||
2349,1900,1902
|
||||
2288,1869,1871
|
||||
6342,5544,5545
|
||||
2349,1900,1901
|
||||
7484,6523,6524
|
||||
2349,1901,1902
|
||||
|
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -1,56 +1,63 @@
|
||||
link_id,bus0,bus1,length,underground,under_construction,tags,geometry
|
||||
5622,5,95,362337.247822072,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32523", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"SA", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(39.6360309199414 24.7771549364779,40.253906 27.059126,40.4982466187365 27.9539936298126)'
|
||||
5995,1215,1217,11419.5508883069,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"36200", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"None", "symbol"=>"DC-Line", "country"=>"TR", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(26.680298 40.313043,26.5557438215552 40.3516481544774)'
|
||||
14547,1261,1277,272366.529693327,f,f,,'LINESTRING(21.0892517161789 39.3174860744064,20.867157 39.426647,20.537567 39.544294,20.037689 39.647997,19.980011 39.678655,19.974518 39.748378,19.947052 39.807481,19.871521 39.834905,19.730072 39.852829,19.27002 39.898148,19.234314 39.900255,18.625946 39.932907,18.1732034663802 39.9781277204571)'
|
||||
12997,1333,7276,92513.5411955304,f,f,,'LINESTRING(8.21673410679486 40.9126998173886,8.31665 40.979898,8.55560299999999 41.139365,9.04861499999999 41.276774,9.17007598089397 41.296762471629)'
|
||||
5632,1361,2309,76847.0139826037,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32533", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"200", "symbol"=>"DC-Line", "country"=>"IT", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(8.67675371049727 40.6777653795244,9.03900099999999 40.979898,9.22164899999999 41.133159,9.19977299501706 41.2082924934473)'
|
||||
5686,1406,1623,234733.218840324,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32590", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Rómulo", "symbol"=>"DC-Line", "country"=>"ES", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(2.48932993486183 39.561252379133,1.13159199999999 39.610978,0 39.710356,-0.234388957535875 39.7314420592468)'
|
||||
14539,2077,2186,391819.608605717,f,t,,'LINESTRING(14.0986517070226 42.4133438660838,14.412689 42.431566,15.115814 42.363618,16.269379 42.067646,16.875 42.126747,16.962891 42.135913,18.531189 42.271212,18.7271798293119 42.3522936900005)'
|
||||
12998,2190,7276,316517.539537871,f,f,,'LINESTRING(9.17009350125146 41.2967653544603,9.38095099999999 41.331451,9.858856 41.352072,10.70755 41.479776,11.25 41.448903,12.100067 41.432431,12.380219 41.426253,12.418671 41.401536,12.704315 41.347948,12.805939 41.368564,12.9016442293009 41.3921592955445)'
|
||||
5621,2306,2307,24868.4258834249,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32521", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"FR", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.37679000208623 42.7053229039427,9.357605 42.552069,9.45054814341409 42.5389781005166)'
|
||||
14545,2306,2310,103628.671904731,f,f,,'LINESTRING(9.37725891362686 42.7057449479108,9.79980499999999 42.799431,10.5931379465185 42.9693952059839)'
|
||||
5631,2307,2308,130349.805131517,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32532", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"200", "symbol"=>"DC-Line", "country"=>"FR", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.45062783092712 42.538721099255,9.35348499999999 42.486277,9.385071 42.11758,9.13387617519614 41.4276065839255)'
|
||||
5627,2308,7276,14773.4696528853,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32528", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"FR", "t9_code"=>"95.1.1", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.1338505292182 41.4269535813022,9.168091 41.303603,9.17008474107272 41.2967639130447)'
|
||||
5628,2309,7276,11623.019620339,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32529", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"IT", "t9_code"=>"95.1.1", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.17008474107272 41.2967639130447,9.168091 41.303603,9.18319700000001 41.250968,9.1995514318356 41.2089447559651)'
|
||||
14562,2403,2510,45367.7245799963,f,f,,'LINESTRING(2.98259070757654 42.2776059846425,2.90313700000001 42.397094,2.867432 42.467032,2.77404800000001 42.655172)'
|
||||
14538,4622,4690,50206.4589537583,f,t,,'LINESTRING(6.43068069229957 50.8136946409214,6.020508 50.766865,5.925751 50.755572,5.73118285928413 50.7304278585398)'
|
||||
14550,4882,4972,232745.802729813,f,f,,'LINESTRING(4.04528166772434 51.9611233898246,2.41561900000001 51.702353,0.794192405058928 51.4189824547604)'
|
||||
14537,4885,4977,138366.495800613,f,t,,'LINESTRING(3.25699821065925 51.2773723094758,1.92947399999999 51.251601,1.27623412238205 51.2327009391635)'
|
||||
14563,4887,4893,52725.5506558225,f,f,,'LINESTRING(1.75051314494826 50.9186901861196,1.43508900000001 50.970535,1.02353536683349 51.0370060560335)'
|
||||
14542,5518,5723,49985.3600979311,t,f,,'LINESTRING(11.2853515176222 55.5308076825323,11.25 55.515415,11.185455 55.488191,11.134644 55.483523,10.735016 55.405629,10.5537191820378 55.3715610145456)'
|
||||
14551,5540,5580,148283.118314702,t,f,,'LINESTRING(11.9669051857726 55.421747727341,12.087708 54.770593,12.13028 54.564896,12.236023 54.258807,12.267609 54.102086)'
|
||||
5645,5567,7278,56022.9511788432,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32550", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Baltic Cable 400 kV", "symbol"=>"DC-Line", "country"=>"SE", "t9_code"=>"401.1.1", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(12.9299984288384 55.0630403498842,12.947388 55.077581,13.002319 55.155336,13.095703 55.379891,13.1752359988911 55.5447059797474)'
|
||||
14560,5486,6227,223028.828051207,f,f,,'LINESTRING(16.861267 54.533833,16.274872 55.092515,16.022186 55.289283,15.37674 55.776573,14.8455441317166 56.1736465974452)'
|
||||
5609,5630,3330,165019.269173818,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32508", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"BorWin 2", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.86000811621652 54.3820648628198,7.274323 54.290882,7.288055 53.860626,7.52975500000001 53.417717,7.454224 53.153359,7.39105200000001 53.107217)'
|
||||
5646,5631,3330,157422.786637004,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32551", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"BorWin 1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.85179661058644 54.3229401608779,6.929626 54.324533,7.209778 54.266026,7.22213700000001 53.866295,7.46932999999999 53.399707,7.39105200000001 53.107217,7.38971611765728 53.0975905041478)'
|
||||
12689,5634,7963,147059.541473767,f,t,,'LINESTRING(7.09421654321873 54.4442608229811,7.044983 54.388555,6.59042399999999 53.907574,6.64672900000001 53.683695,6.594543 53.59821,6.707153 53.530513,7.15458909584828 53.4027444956413)'
|
||||
14561,5635,6886,155676.108968138,f,f,,'LINESTRING(9.32702542887196 53.9319881402015,9.06784100000001 54.038425,8.346863 54.26763,8.08181799999999 54.59355,7.88268999999999 54.8355,7.78817109722772 54.9221888624046)'
|
||||
5658,5636,6886,90848.7767557952,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32562", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"HelWin 2", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(8.05753950747769 54.25154162105,8.20541400000001 54.23634,9.10354599999999 53.970628,9.32696419621539 53.9319113429646)'
|
||||
5617,5637,6886,89346.6337548304,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32517", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"HelWin 1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(8.12610708224912 54.310749538123,8.238373 54.256401,9.32699442549698 53.9319562532009)'
|
||||
5612,5638,5673,139209.866527364,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32512", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin 1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.84493115764205 53.880869,6.909027 53.880869,7.116394 53.835512,7.36358600000001 53.396432,7.32101399999999 53.112163,7.33612100000001 52.893992,7.16075117704058 52.8485079587114)'
|
||||
5615,5639,8464,99066.5793764307,f,t,'"MW"=>"None", "TSO"=>"None", "oid"=>"32515", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin 3", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.84423599483409 53.8134043878533,6.71127300000001 53.693454,6.65634200000001 53.59821,6.73461900000001 53.55581,7.112274 53.45126,7.05596900000001 53.340713,7.237244 53.26932,7.223511 53.18135,7.223511 53.1805270078955)'
|
||||
14540,5645,5687,280301.445474794,f,t,,'LINESTRING(6.75668661933496 53.437616158174,6.838989 53.664171,6.96258499999999 53.785238,7.34298700000001 53.882488,7.80029300000001 54.517096,8.20678699999999 55.297102,8.86005375885099 55.4336013425692)'
|
||||
14558,5643,6237,575352.425009444,f,f,,'LINESTRING(6.83036734046461 53.4374933986115,6.253967 53.645452,6.33636499999999 55.776573,6.34597800000001 56.029855,6.34597800000001 56.030622,6.43661500000001 58.130121,6.90176957000565 58.2653404287817)'
|
||||
5613,5662,5673,131420.09609615,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32513", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin 2", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(7.11083415172816 53.9630966319811,7.07107499999999 53.80795,7.301788 53.39807,7.267456 53.110514,7.29354899999999 52.907246,7.16070024970726 52.8485606886388)'
|
||||
14584,5739,6232,574884.998052791,f,t,,'LINESTRING(6.81690675921544 58.6338502746805,6.63024900000001 58.249559,6.78268399999999 57.579197,7.17544599999999 56.532986,7.17407200000001 56.5345,7.46521000000001 55.776573,7.46521000000001 55.776573,7.64099100000001 55.312736,8.458099 54.316523,9.394684 53.934262)'
|
||||
5626,5773,5951,59184.4227659405,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32527", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"222.1.2", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-4.94702447012386 55.0727948492206,-5.137482 55.042188,-5.62500000000001 54.890036,-5.631866 54.887667,-5.7332134509551 54.813550429852)'
|
||||
5625,5773,5951,58741.4601812995,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32526", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"222.1.1", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-4.94689333475508 55.0726735779237,-5.045471 55.009914,-5.59616099999999 54.840245,-5.62500000000001 54.834709,-5.73306677066227 54.8134313531551)'
|
||||
8068,5777,5816,363085.503577327,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"70191", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Western HVDC link", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-3.18595885129092 53.213699479605,-3.158569 53.308724,-3.40988200000001 53.511735,-4.081421 53.803084,-5.158081 54.013418,-5.28442399999999 54.866334,-5.177307 55.345546,-4.88616899999999 55.586883,-4.8806877889882 55.7044245716822)'
|
||||
14552,5817,5965,242400.41935291,f,f,,'LINESTRING(-3.12293971810515 53.2087645354697,-3.13934300000001 53.266034,-3.368683 53.377594,-5.18280000000001 53.495399,-5.62500000000001 53.519084,-5.62500000000001 53.519084,-6.101532 53.503568,-6.61057668606004 53.483977180569)'
|
||||
14541,5829,6547,695432.776022422,f,t,,'LINESTRING(6.64773945778347 59.5995729910866,6.483307 59.539192,6.374817 59.538495,6.24847399999999 59.510636,6.196289 59.448566,5.898285 59.321981,5.64697299999999 59.234284,5.62500000000001 59.223042,4.81338500000001 58.813742,2.03384400000001 57.374679,0 56.170023,-0.650940000000012 55.776573,-1.55838055228731 55.2221613174321)'
|
||||
5681,6177,6224,93313.2906756649,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32585", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"150", "symbol"=>"DC-Line", "country"=>"SE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(18.2272491895352 57.5711315582343,17.274628 57.645401,16.875 57.674052,16.6818074486274 57.692364166947)'
|
||||
14531,6187,6290,122935.90852816,f,f,,'LINESTRING(12.0227887239052 57.5613889668514,11.56723 57.399104,11.25 57.282754,11.174469 57.255281,11.001434 57.197296,10.740509 57.15263,10.578461 57.16678,10.366974 57.123569,10.2163571716117 57.1310010356663)'
|
||||
14556,6187,6290,122337.134741418,f,f,,'LINESTRING(10.2163282994747 57.1311139024238,10.567474 57.20771,10.737762 57.192832,10.972595 57.230016,11.25 57.33171,11.532898 57.436081,11.867981 57.556366,12.0227165657676 57.561507168045)'
|
||||
14543,6215,6219,231949.272290748,t,f,,'LINESTRING(13.5805221045065 55.8609194660763,13.8414 57.218121,14.543152 57.740083,14.6252826086631 57.7992163579132)'
|
||||
11511,6241,6241,5210.95506577122,f,f,,'LINESTRING(8.1598074770784 58.2085243278292,8.10270452292159 58.2443198548829)'
|
||||
14559,6241,6271,221531.577587461,t,f,,'LINESTRING(9.55911260232728 56.517221415454,9.31778000000001 56.610909,9.37408399999999 56.871495,9.14611800000001 57.139965,9.13650499999999 57.15263,8.728638 57.517298,8.728638 57.51656,8.072205 58.088041,8.10237170729511 58.2442659573338)'
|
||||
14564,6241,6271,210556.812324662,f,f,,'LINESTRING(9.5593768244562 56.5173195981253,9.503174 56.649432,9.540253 56.875247,9.35348499999999 57.103436,9.27932699999999 57.19804,8.83300799999999 57.593919,8.26446499999999 58.085137,8.1602778860472 58.2081284789676)'
|
||||
14546,6241,6271,212662.44654471,f,f,,'LINESTRING(9.5592153675061 56.5172786524048,9.408417 56.631308,9.456482 56.875247,9.27108800000001 57.092992,9.20654300000001 57.172736,8.779449 57.554155,8.143616 58.107636,8.16005655183257 58.2081093636119)'
|
||||
14553,6430,6440,122361.767586768,f,f,,'LINESTRING(26.4769584465578 59.4850679251882,26.423492 59.613602,26.048584 59.92956,25.632477 60.272515,25.743713 60.360243,25.6409589827655 60.4394217635504)'
|
||||
14555,6449,6481,257364.279393886,f,f,,'LINESTRING(21.3559064590049 61.0800030227353,21.303864 61.005076,20.946808 60.801394,18.153534 60.501202,18.007965 60.483615,17.171631 60.503906,17.0593630437863 60.5503864910584)'
|
||||
14554,6452,6481,197128.229552834,f,f,,'LINESTRING(21.3557421230034 61.0800501553429,20.902863 60.846249,18.224945 60.556604,18.0193872312079 60.533018071939)'
|
||||
14548,6483,6486,140169.735736189,f,f,,'LINESTRING(22.3045576957813 60.4368452717433,21.404114 60.329667,19.8472351583549 60.129935739173)'
|
||||
8456,6547,6559,21158.5735245602,f,t,'"MW"=>"None", "TSO"=>"None", "oid"=>"89791", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"None", "symbol"=>"DC-Line", "country"=>"NO", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.64851407057135 59.5996162767494,6.99238592942864 59.5246589234811)'
|
||||
5614,5673,8464,38561.1931761179,f,t,'"MW"=>"None", "TSO"=>"None", "oid"=>"32514", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin 3", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(7.223511 53.1805270078955,7.223511 53.179704,7.21527100000001 53.121229,7.24273699999999 52.932086,7.16056753068224 52.8486333236236)'
|
||||
14549,6916,7278,192679.375330137,f,f,,'LINESTRING(10.883331 53.950429,11.25 54.062612,11.25 54.062612,11.657867 54.186548,12.208557 54.386955,12.236023 54.402946,12.43515 54.541003,12.602692 54.684153,12.745514 54.844199,12.744141 54.842618,12.87735 54.979978,12.947388 55.077581,12.9299984288384 55.0630403498842)'
|
||||
12690,6934,7963,6905.52230262641,f,t,,'LINESTRING(7.15460523215685 53.4027398808691,7.24823000000001 53.375956)'
|
||||
5577,5,94,362337.247822072,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32523", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"SA", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(39.6360309199414 24.7771549364779,40.253906 27.059126,40.4982466187365 27.9539936298126)'
|
||||
5946,1231,1233,11419.5508883069,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"36200", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"None", "symbol"=>"DC-Line", "country"=>"TR", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"1.487925895e+12", "DeletedDate"=>"None", "ModifiedDate"=>"1.48895783e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(26.680298 40.313043,26.5557438215552 40.3516481544774)'
|
||||
14811,1278,1294,272366.529693327,f,f,,'LINESTRING(21.0892517161789 39.3174860744064,20.867157 39.426647,20.537567 39.544294,20.037689 39.647997,19.980011 39.678655,19.974518 39.748378,19.947052 39.807481,19.871521 39.834905,19.730072 39.852829,19.27002 39.898148,19.234314 39.900255,18.625946 39.932907,18.1732034663802 39.9781277204571)'
|
||||
13588,1349,7428,92513.5411955304,f,f,,'LINESTRING(8.21673410679486 40.9126998173886,8.31665 40.979898,8.55560299999999 41.139365,9.04861499999999 41.276774,9.17007598089397 41.296762471629)'
|
||||
5587,1377,2382,76847.0139826037,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32533", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"200", "symbol"=>"DC-Line", "country"=>"IT", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(8.67675371049727 40.6777653795244,9.03900099999999 40.979898,9.22164899999999 41.133159,9.19977299501706 41.2082924934473)'
|
||||
5640,1422,1638,234733.218840324,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32590", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Rómulo", "symbol"=>"DC-Line", "country"=>"ES", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(2.48932993486183 39.561252379133,1.13159199999999 39.610978,0 39.710356,-0.234388957535875 39.7314420592468)'
|
||||
13589,2262,7428,316517.539537871,f,f,,'LINESTRING(9.17009350125146 41.2967653544603,9.38095099999999 41.331451,9.858856 41.352072,10.70755 41.479776,11.25 41.448903,12.100067 41.432431,12.380219 41.426253,12.418671 41.401536,12.704315 41.347948,12.805939 41.368564,12.9016442293009 41.3921592955445)'
|
||||
14802,2258,7029,391819.608605717,f,t,,'LINESTRING(14.0986517070226 42.4133438660838,14.412689 42.431566,15.115814 42.363618,16.269379 42.067646,16.875 42.126747,16.962891 42.135913,18.531189 42.271212,18.7271798293119 42.3522936900005)'
|
||||
14668,2333,3671,146536.932669904,f,t,,'LINESTRING(6.04271995139229 45.4637174756646,6.16607700000001 45.327048,6.351471 45.183973,6.54922499999999 45.148148,6.62338299999999 45.101638,6.642609 45.089036,6.70440700000001 45.05121,6.980438 45.089036,7.00653099999999 45.092914,7.21939099999999 45.094853,7.223511 45.089036,7.378693 44.871443,7.32136143270145 44.8385424366672)'
|
||||
14808,2379,2383,103628.671904731,f,f,,'LINESTRING(9.37725891362686 42.7057449479108,9.79980499999999 42.799431,10.5931379465185 42.9693952059839)'
|
||||
5575,2379,2380,24868.4258834249,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32521", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"FR", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.37679000208623 42.7053229039427,9.357605 42.552069,9.45054814341409 42.5389781005166)'
|
||||
5586,2380,2381,130349.805131517,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32532", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"200", "symbol"=>"DC-Line", "country"=>"FR", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.45062783092712 42.538721099255,9.35348499999999 42.486277,9.385071 42.11758,9.13387617519614 41.4276065839255)'
|
||||
5582,2381,7428,14773.4696528853,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32528", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"FR", "t9_code"=>"FR-IT-01", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.555318236e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.1338505292182 41.4269535813022,9.168091 41.303603,9.17008474107272 41.2967639130447)'
|
||||
5583,2382,7428,11623.019620339,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32529", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"IT", "t9_code"=>"FR-IT-01", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.555323123e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(9.17008474107272 41.2967639130447,9.168091 41.303603,9.18319700000001 41.250968,9.1995514318356 41.2089447559651)'
|
||||
14825,2476,2585,45367.7245799963,f,f,,'LINESTRING(2.98259070757654 42.2776059846425,2.90313700000001 42.397094,2.867432 42.467032,2.77404800000001 42.655172)'
|
||||
8745,3611,8302,9361.61122972312,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"120591", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"None", "symbol"=>"DC-Line", "country"=>"CH", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"1", "CreatedDate"=>"1.556535027e+12", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(7.95410166666667 47.5542867377085,7.928009 47.555214,7.937622 47.526475,7.96895162362761 47.4961125343931)'
|
||||
14801,4709,4781,50206.4589537583,f,t,,'LINESTRING(6.43068069229957 50.8136946409214,6.020508 50.766865,5.925751 50.755572,5.73118285928413 50.7304278585398)'
|
||||
14814,4972,5062,232745.802729813,f,f,,'LINESTRING(4.04528166772434 51.9611233898246,2.41561900000001 51.702353,0.794192405058928 51.4189824547604)'
|
||||
5558,4975,7427,45665.1050240866,f,t,'"MW"=>"None", "TSO"=>"None", "oid"=>"32502", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>" BE-UK-01", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.555407949e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(1.92947399999999 51.251601,1.27623412238205 51.2327009391635)'
|
||||
14826,4977,4983,52725.5506558225,f,f,,'LINESTRING(1.75051314494826 50.9186901861196,1.43508900000001 50.970535,1.02353536683349 51.0370060560335)'
|
||||
12727,5071,7427,92700.7652335474,t,f,,'LINESTRING(3.25699821065925 51.2773723094758,1.9294829557868 51.251601173908)'
|
||||
14820,5605,6355,388691.281442629,f,f,,'LINESTRING(21.2350493599337 55.6649551399923,21.220093 55.641174,21.122589 55.631872,20.210724 55.776573,19.114838 55.949969,18.491364 55.974567,16.875 56.109576,16.875 56.109576,16.292725 56.158553,15.545654 56.647167)'
|
||||
14805,5624,5847,49985.3600979311,t,f,,'LINESTRING(11.2853515176222 55.5308076825323,11.25 55.515415,11.185455 55.488191,11.134644 55.483523,10.735016 55.405629,10.5537191820378 55.3715610145456)'
|
||||
14824,5646,5685,148283.118314702,f,f,,'LINESTRING(11.9669051857726 55.421747727341,12.087708 54.770593,12.13028 54.564896,12.236023 54.258807,12.267609 54.102086)'
|
||||
5601,5673,7430,56022.9511788432,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32550", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Baltic Cable 400 kV", "symbol"=>"DC-Line", "country"=>"SE", "t9_code"=>"DE-SE-01", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.555402087e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(12.9299984288384 55.0630403498842,12.947388 55.077581,13.002319 55.155336,13.095703 55.379891,13.1752359988911 55.5447059797474)'
|
||||
14823,5588,6351,223028.828051207,f,f,,'LINESTRING(16.861267 54.533833,16.274872 55.092515,16.022186 55.289283,15.37674 55.776573,14.8455441317166 56.1736465974452)'
|
||||
5564,5736,5784,165019.269173818,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32508", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"BorWin2", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224178e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.86000811621652 54.3820648628198,7.274323 54.290882,7.288055 53.860626,7.52975500000001 53.417717,7.454224 53.153359,7.39105200000001 53.107217)'
|
||||
5602,5737,5784,157422.786637004,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32551", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"BorWin1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224172e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.85179661058644 54.3229401608779,6.929626 54.324533,7.209778 54.266026,7.22213700000001 53.866295,7.46932999999999 53.399707,7.39105200000001 53.107217,7.38971611765728 53.0975905041478)'
|
||||
12931,5740,8153,147059.541473767,f,t,,'LINESTRING(7.09421654321873 54.4442608229811,7.044983 54.388555,6.59042399999999 53.907574,6.64672900000001 53.683695,6.594543 53.59821,6.707153 53.530513,7.15458909584828 53.4027444956413)'
|
||||
5625,5741,7074,155612.70583537,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32573", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"SylWin1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.549874782e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(7.78813124671178 54.9221766212277,8.346863 54.26763,9.06784100000001 54.038425,9.32702542887196 53.9319881402015)'
|
||||
5613,5742,7074,90848.7767557952,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32562", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"HelWin2", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224095e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(8.05753950747769 54.25154162105,8.20541400000001 54.23634,9.10354599999999 53.970628,9.32696419621539 53.9319113429646)'
|
||||
5571,5743,7074,89346.6337548304,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32517", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"HelWin1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224101e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(8.12610708224912 54.310749538123,8.238373 54.256401,9.32699442549698 53.9319562532009)'
|
||||
5567,5744,5787,139209.866527364,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32512", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin1", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224147e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.84493115764205 53.880869,6.909027 53.880869,7.116394 53.835512,7.36358600000001 53.396432,7.32101399999999 53.112163,7.33612100000001 52.893992,7.16075117704058 52.8485079587114)'
|
||||
5570,5745,8272,99066.5793764307,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32515", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin3", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224133e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.84423599483409 53.8134043878533,6.71127300000001 53.693454,6.65634200000001 53.59821,6.73461900000001 53.55581,7.112274 53.45126,7.05596900000001 53.340713,7.237244 53.26932,7.223511 53.18135,7.223511 53.1805270078955)'
|
||||
14803,5751,5803,280301.445474794,f,t,,'LINESTRING(6.75668661933496 53.437616158174,6.838989 53.664171,6.96258499999999 53.785238,7.34298700000001 53.882488,7.80029300000001 54.517096,8.20678699999999 55.297102,8.86005375885099 55.4336013425692)'
|
||||
14821,5749,6363,575352.425009444,f,f,,'LINESTRING(6.83036734046461 53.4374933986115,6.253967 53.645452,6.33636499999999 55.776573,6.34597800000001 56.029855,6.34597800000001 56.030622,6.43661500000001 58.130121,6.90176957000565 58.2653404287817)'
|
||||
5568,5768,5787,131420.09609615,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32513", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin2", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.545224159e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(7.11083415172816 53.9630966319811,7.07107499999999 53.80795,7.301788 53.39807,7.267456 53.110514,7.29354899999999 52.907246,7.16070024970726 52.8485606886388)'
|
||||
12932,5770,5773,6905.52230262641,f,t,,'LINESTRING(7.15460523215685 53.4027398808691,7.24823000000001 53.375956)'
|
||||
14848,5858,6358,574884.998052791,f,t,,'LINESTRING(6.81690675921544 58.6338502746805,6.63024900000001 58.249559,6.78268399999999 57.579197,7.17544599999999 56.532986,7.17407200000001 56.5345,7.46521000000001 55.776573,7.46521000000001 55.776573,7.64099100000001 55.312736,8.458099 54.316523,9.394684 53.934262)'
|
||||
5581,5893,6072,59184.4227659405,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32527", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"222.1.2", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-4.94702447012386 55.0727948492206,-5.137482 55.042188,-5.62500000000001 54.890036,-5.631866 54.887667,-5.7332134509551 54.813550429852)'
|
||||
5580,5893,6072,58741.4601812995,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32526", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>" ", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"222.1.1", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"1", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-4.94689333475508 55.0726735779237,-5.045471 55.009914,-5.59616099999999 54.840245,-5.62500000000001 54.834709,-5.73306677066227 54.8134313531551)'
|
||||
8009,5897,5936,363085.503577327,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"70191", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Western HVDC link", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"1.514994622e+12", "DeletedDate"=>"None", "ModifiedDate"=>"1.51499467e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-3.18595885129092 53.213699479605,-3.158569 53.308724,-3.40988200000001 53.511735,-4.081421 53.803084,-5.158081 54.013418,-5.28442399999999 54.866334,-5.177307 55.345546,-4.88616899999999 55.586883,-4.8806877889882 55.7044245716822)'
|
||||
14815,5937,6086,242400.41935291,f,f,,'LINESTRING(-3.12293971810515 53.2087645354697,-3.13934300000001 53.266034,-3.368683 53.377594,-5.18280000000001 53.495399,-5.62500000000001 53.519084,-5.62500000000001 53.519084,-6.101532 53.503568,-6.61057668606004 53.483977180569)'
|
||||
14804,5949,6684,695432.776022422,f,t,,'LINESTRING(6.64773945778347 59.5995729910866,6.483307 59.539192,6.374817 59.538495,6.24847399999999 59.510636,6.196289 59.448566,5.898285 59.321981,5.64697299999999 59.234284,5.62500000000001 59.223042,4.81338500000001 58.813742,2.03384400000001 57.374679,0 56.170023,-0.650940000000012 55.776573,-1.55838055228731 55.2221613174321)'
|
||||
5635,6300,6348,93313.2906756649,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"32585", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"150", "symbol"=>"DC-Line", "country"=>"SE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(18.2272491895352 57.5711315582343,17.274628 57.645401,16.875 57.674052,16.6818074486274 57.692364166947)'
|
||||
14819,6311,6416,122337.134741418,f,f,,'LINESTRING(10.2163282994747 57.1311139024238,10.567474 57.20771,10.737762 57.192832,10.972595 57.230016,11.25 57.33171,11.532898 57.436081,11.867981 57.556366,12.0227165657676 57.561507168045)'
|
||||
14809,6311,6416,122935.90852816,f,f,,'LINESTRING(10.2163571716117 57.1310010356663,10.366974 57.123569,10.578461 57.16678,10.740509 57.15263,11.001434 57.197296,11.174469 57.255281,11.25 57.282754,11.56723 57.399104,12.0227887239052 57.5613889668514)'
|
||||
14806,6339,6343,231949.324357763,t,f,,'LINESTRING(13.5805221045065 55.8609194660763,13.8414 57.218121,14.242401 57.518035,14.543152 57.740083,14.6252826086631 57.7992163579132)'
|
||||
14822,6366,6396,221531.577587461,t,f,,'LINESTRING(9.55911260232728 56.517221415454,9.31778000000001 56.610909,9.37408399999999 56.871495,9.14611800000001 57.139965,9.13650499999999 57.15263,8.728638 57.517298,8.728638 57.51656,8.072205 58.088041,8.10237170729511 58.2442659573338)'
|
||||
11679,6366,6366,5210.95506577122,f,f,,'LINESTRING(8.1598074770784 58.2085243278292,8.10270452292159 58.2443198548829)'
|
||||
14827,6366,6396,210556.812324662,f,f,,'LINESTRING(9.5593768244562 56.5173195981253,9.503174 56.649432,9.540253 56.875247,9.35348499999999 57.103436,9.27932699999999 57.19804,8.83300799999999 57.593919,8.26446499999999 58.085137,8.1602778860472 58.2081284789676)'
|
||||
14810,6366,6396,212662.44654471,f,f,,'LINESTRING(9.5592153675061 56.5172786524048,9.408417 56.631308,9.456482 56.875247,9.27108800000001 57.092992,9.20654300000001 57.172736,8.779449 57.554155,8.143616 58.107636,8.16005655183257 58.2081093636119)'
|
||||
8708,6444,6449,146493.574370529,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"115391", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Caithness Moray HVDC", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"1.550153418e+12", "DeletedDate"=>"None", "ModifiedDate"=>"1.550153457e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-3.45338156475634 58.4908032474467,-3.00064100000001 58.470721,-3.239594 58.019737,-3.01171670865558 57.4230098547791)'
|
||||
8706,6445,6449,81229.3870806789,f,f,'"MW"=>"None", "TSO"=>"None", "oid"=>"115000", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"Caithness Moray HVDC", "symbol"=>"DC-Line", "country"=>"UK", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"1.550153227e+12", "DeletedDate"=>"None", "ModifiedDate"=>"1.550153468e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(-3.01162654963074 57.4230148179161,-3.01025445036928 58.1523540206105)'
|
||||
14807,6556,6558,91438.6141819039,f,f,,'LINESTRING(24.597015 59.388479,24.514618 59.41015,24.44046 59.58928,24.505005 59.752936,24.503632 59.751553,24.6559859237295 60.1665725262589)'
|
||||
14816,6567,6577,122361.767586768,f,f,,'LINESTRING(26.4769584465578 59.4850679251882,26.423492 59.613602,26.048584 59.92956,25.632477 60.272515,25.743713 60.360243,25.6409589827655 60.4394217635504)'
|
||||
14818,6586,6618,257364.279393886,f,f,,'LINESTRING(21.3559064590049 61.0800030227353,21.303864 61.005076,20.946808 60.801394,18.153534 60.501202,18.007965 60.483615,17.171631 60.503906,17.0593630437863 60.5503864910584)'
|
||||
14817,6589,6618,197128.229552834,f,f,,'LINESTRING(21.3557421230034 61.0800501553429,20.902863 60.846249,18.224945 60.556604,18.0193872312079 60.533018071939)'
|
||||
14812,6620,6623,140169.735736189,f,f,,'LINESTRING(22.3045576957813 60.4368452717433,21.404114 60.329667,19.8472351583549 60.129935739173)'
|
||||
8394,6684,6696,21158.5735245602,f,t,'"MW"=>"None", "TSO"=>"None", "oid"=>"89791", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"None", "symbol"=>"DC-Line", "country"=>"NO", "t9_code"=>"None", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"1.518010133e+12", "DeletedDate"=>"None", "ModifiedDate"=>"None", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(6.64851407057135 59.5996162767494,6.99238592942864 59.5246589234811)'
|
||||
5569,5787,8272,38561.1931761179,f,t,'"MW"=>"None", "TSO"=>"None", "oid"=>"32514", "ext1"=>"None", "EIC_2"=>"None", "EIC_3"=>"None", "EIC_4"=>"None", "text_"=>"DolWin 3", "symbol"=>"DC-Line", "country"=>"DE", "t9_code"=>"0", "visible"=>"1", "EIC_code"=>"None", "tie_line"=>"0", "oneCircuit"=>"0", "CreatedDate"=>"None", "DeletedDate"=>"None", "ModifiedDate"=>"1.489072219e+12", "Internalcomments"=>"None", "visible_on_printed"=>"1"','LINESTRING(7.223511 53.1805270078955,7.223511 53.179704,7.21527100000001 53.121229,7.24273699999999 52.932086,7.16056753068224 52.8486333236236)'
|
||||
14813,7053,7430,192856.020480538,f,f,,'LINESTRING(10.8823542109264 53.948125809387,11.25 54.061,11.657867 54.186548,12.208557 54.386955,12.236023 54.402946,12.43515 54.541003,12.602692 54.684153,12.745514 54.844199,12.744141 54.842618,12.87735 54.979978,12.947388 55.077581,12.9299984288384 55.0630403498842)'
|
||||
|
Can't render this file because it contains an unexpected character in line 2 and column 33.
|
File diff suppressed because it is too large
Load Diff
@ -1,45 +1,58 @@
|
||||
Link:
|
||||
p_nom:
|
||||
oid:
|
||||
"32551": 400 # BorWin 1
|
||||
"32508": 800 # BorWin 2
|
||||
"32508": 400 #BorWin1
|
||||
"32513": 900 # DolWin 2
|
||||
"32515": 900 # DolWin 3
|
||||
"32517": 576 # HelWin 1
|
||||
"32526": 250 # GB-IE
|
||||
"32527": 250 # GB-IE
|
||||
"32532": 300 # Links on Corse
|
||||
"32528": 300 # Links on Corse
|
||||
"32521": 300 # Links on Corse
|
||||
"32529": 300 # SACOI (between Sardignia and Corse)
|
||||
"32533": 300 # SACOI (between Sardignia and Corse)
|
||||
"32529": 300 # SACOI (between Sardinia and Corse)
|
||||
"32533": 300 # SACOI (between Sardinia and Corse)
|
||||
"32550": 600 # Baltic
|
||||
"115391": 1200 # Caithness Moray HVDC
|
||||
"115000": 1200 # Caithness Moray HVDC
|
||||
index:
|
||||
"14541": 1400 # North-Sea link (NSN Link)
|
||||
"14559": 700 # NO-DK Skagerrak 4
|
||||
"14564": 440 # NO-DK Skagerrak 3
|
||||
"14546": 500 # NO-DK Skagerrak 1-2
|
||||
"11511": 940 # NO-DK Skagerrak joint 1-3
|
||||
"14543": 1440 # SE-SE
|
||||
"14531": 250 # Konti-Skan
|
||||
"14540": 700 # Cobra DK-NL
|
||||
"14537": 1000 # NEMO GB-BE
|
||||
"14538": 1000 # ALEGrO BE-DE
|
||||
"14556": 600 # Storebaelt
|
||||
"12997": 1000 # Sardignia
|
||||
"14804": 1400 # North-Sea link (NSN Link)
|
||||
"14822": 700 # NO-DK Skagerrak 4
|
||||
"14827": 440 # NO-DK Skagerrak 3
|
||||
"14810": 500 # NO-DK Skagerrak 1-2
|
||||
"11679": 940 # NO-DK Skagerrak joint 1-3
|
||||
"14806": 1440 # SE-SE
|
||||
"14819": 250 # Hassing-Lindome
|
||||
"14803": 700 # Cobra DK-NL
|
||||
"T6": 1000 # NEMO GB-BE
|
||||
"14801": 1000 # ALEGrO BE-DE
|
||||
"14807": 600 # SwePol
|
||||
"14820": 700 #NordBalt
|
||||
bus0:
|
||||
index:
|
||||
"8708": "6443" # fix bus-id of substation in GB
|
||||
"8009": "5896" # fix bus-id of substation in GB
|
||||
"11679": "6365" # fix bus-id of substation in NO
|
||||
"12727": "7427" # bus0 == bus1 to remove link in remove_unconnected_components (GB-BE)
|
||||
"5558": "7427" # bus0 == bus1 to remove link in remove_unconnected_components (GB-BE)
|
||||
"5583": "7428" # bus0 == bus1 to remove link in remove_unconnected_components (Sardinia)
|
||||
"13588": "7428" # bus0 == bus1 to remove link in remove_unconnected_components (Sardinia)
|
||||
"T23": "6355" # bus0 == bus1 to remove link in remove_unconnected_components (NordBalt)
|
||||
bus1:
|
||||
index:
|
||||
"11511": "6240" # fix wrong bus allocation from 6241
|
||||
"14559": "6240" # fix wrong bus allocation from 6241
|
||||
"12998": "1333" # combine link 12998 + 12997 in 12998
|
||||
"5627": '2309' # combine link 5627 + 5628 in 5627
|
||||
"8068": "5819" # fix GB location of Anglo-Scottish interconnector
|
||||
length:
|
||||
"12931": "8152" # BorWin3
|
||||
"5582": "2382" # combine link 5583 + 5582 in 5582 (Sardinia)
|
||||
"13589": "1349" # combine link 13589 + 13588 in 13589 (Sardinia)
|
||||
"14820": "6354" # NordBalt
|
||||
length:
|
||||
index:
|
||||
"12998": 409.0
|
||||
"5627": 26.39
|
||||
bus0:
|
||||
index:
|
||||
"14552": "5819" # fix GB location of GB-IE interconnector
|
||||
"5628": "7276" # bus0 == bus1 to remove link in remove_unconnected_components
|
||||
"12997": "7276" # bus0 == bus1 to remove link in remove_unconnected_components
|
||||
"5582": 26.39 # new length of combined links (sum)
|
||||
"13589": 509.0 # new length of combined links (sum)
|
||||
Line:
|
||||
bus1:
|
||||
index:
|
||||
"14573": "7950" #fix bus-id substation in PT (220/380kV issue)
|
||||
bus0:
|
||||
index:
|
||||
"14573": "7179" #fix bus-id substation in PT (220/380kV issue)
|
||||
v_nom:
|
||||
index:
|
||||
"14573": 220 # 220/380kV issue of substation in PT
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
2
doc/_static/theme_overrides.css
vendored
2
doc/_static/theme_overrides.css
vendored
@ -1,5 +1,5 @@
|
||||
/* SPDX-FileCopyrightText: 2017-2020 The PyPSA-Eur Authors
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-License-Identifier: MIT
|
||||
*/
|
||||
|
||||
.wy-side-nav-search {
|
||||
|
@ -108,6 +108,7 @@ Make sure that your instance is operating for the next steps.
|
||||
- Option 1. Click on the Tools button and "Install Public Key into Server..". Somewhere in your folder structure must be a public key. I found it with the following folder syntax on my local windows computer -> :\Users\...\.ssh (there should be a PKK file).
|
||||
- Option 2. Click on the Tools button and "Generate new key pair...". Save the private key at a folder you remember and add it to the "private key file" field in WinSCP. Upload the public key to the metadeta of your instance.
|
||||
- Click ok and save. Then click Login. If successfull WinSCP will open on the left side your local computer folder structure and on the right side the folder strucutre of your VM. (If you followed Option 2 and its not initially working. Stop your instance, refresh the website, reopen the WinSCP field. Afterwards your your Login should be successfull)
|
||||
|
||||
If you had struggle with the above steps, you could also try `this video <https://www.youtube.com/watch?v=lYx1oQkEF0E>`_.
|
||||
|
||||
.. note::
|
||||
|
18
doc/conf.py
18
doc/conf.py
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: 20017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
@ -76,7 +76,7 @@ author = u'Jonas Hoersch (KIT, FIAS), Fabian Hofmann (FIAS), David Schlachtberge
|
||||
# The short X.Y version.
|
||||
version = u'0.3'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = u'0.3.0'
|
||||
release = u'0.4.0'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
@ -157,16 +157,12 @@ html_theme_options = {
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
# These folders are copied to the documentation's HTML output
|
||||
html_static_path = ["_static"]
|
||||
|
||||
html_context = {
|
||||
'css_files': [
|
||||
'_static/theme_overrides.css', # override wide tables in RTD theme
|
||||
],
|
||||
}
|
||||
# These paths are either relative to html_static_path
|
||||
# or fully qualified paths (eg. https://...)
|
||||
html_css_files = ["theme_overrides.css"]
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
|
3
doc/configtables/clustering.csv
Normal file
3
doc/configtables/clustering.csv
Normal file
@ -0,0 +1,3 @@
|
||||
,Unit,Values,Description
|
||||
simplify,,,
|
||||
-- to_substations,bool,"{'true','false'}","Aggregates all nodes without power injection (positive or negative, i.e. demand or generation) to electrically closest ones"
|
|
@ -1,19 +1,19 @@
|
||||
,Unit,Values,Description,
|
||||
voltages,kV,"Any subset of {220., 300., 380.}",Voltage levels to consider when,
|
||||
co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide emissions,
|
||||
co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard.,
|
||||
,Unit,Values,Description
|
||||
voltages,kV,"Any subset of {220., 300., 380.}",Voltage levels to consider when
|
||||
co2limit,:math:`t_{CO_2-eq}/a`,float,Cap on total annual system carbon dioxide emissions
|
||||
co2base,:math:`t_{CO_2-eq}/a`,float,Reference value of total annual system carbon dioxide emissions if relative emission reduction target is specified in ``{opts}`` wildcard.
|
||||
agg_p_nom_limits,file,path,Reference to ``.csv`` file specifying per carrier generator nominal capacity constraints for individual countries if ``'CCL'`` is in ``{opts}`` wildcard. Defaults to ``data/agg_p_nom_minmax.csv``.
|
||||
extendable_carriers,,,,
|
||||
extendable_carriers,,,
|
||||
-- Generator,--,"Any subset of {'OCGT','CCGT'}",Places extendable conventional power plants (OCGT and/or CCGT) where gas power plants are located today without capacity limits.
|
||||
-- StorageUnit,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity.
|
||||
-- Store,--,"Any subset of {'battery','H2'}",Adds extendable storage units (battery and/or hydrogen) at every node/bus after clustering without capacity limits and with zero initial capacity.
|
||||
-- Link,--,Any subset of {'H2 pipeline'},Adds extendable links (H2 pipelines only) at every connection where there are lines or HVDC links without capacity limits and with zero initial capacity. Hydrogen pipelines require hydrogen storage to be modelled as ``Store``.
|
||||
max_hours,,,,
|
||||
max_hours,,,
|
||||
-- battery,h,float,Maximum state of charge capacity of the battery in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
|
||||
-- H2,h,float,Maximum state of charge capacity of the hydrogen storage in terms of hours at full output capacity ``p_nom``. Cf. `PyPSA documentation <https://pypsa.readthedocs.io/en/latest/components.html#storage-unit>`_.
|
||||
powerplants_filter,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database.,
|
||||
custom_powerplants,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database.,
|
||||
conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``.,
|
||||
renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list <https://data.open-power-system-data.org/renewable_power_plants/>`_,
|
||||
estimate_renewable_capacities_from_capacitiy_stats,,,,
|
||||
"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype,
|
||||
powerplants_filter,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country not in ['Germany']",Filter query for the default powerplant database.
|
||||
custom_powerplants,--,"use `pandas.query <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.query.html>`_ strings here, e.g. Country in ['Germany']",Filter query for the custom powerplant database.
|
||||
conventional_carriers,--,"Any subset of {nuclear, oil, OCGT, CCGT, coal, lignite, geothermal, biomass}",List of conventional power plants to include in the model from ``resources/powerplants.csv``.
|
||||
renewable_capacities_from_OPSD,,"[solar, onwind, offwind]",List of carriers (offwind-ac and offwind-dc are included in offwind) whose capacities 'p_nom' are aligned to the `OPSD renewable power plant list <https://data.open-power-system-data.org/renewable_power_plants/>`_
|
||||
estimate_renewable_capacities_from_capacitiy_stats,,,
|
||||
"-- Fueltype [ppm], e.g. Wind",,"list of fueltypes strings in PyPSA-Eur, e.g. [onwind, offwind-ac, offwind-dc]",converts ppm Fueltype to PyPSA-EUR Fueltype
|
||||
|
Can't render this file because it has a wrong number of fields in line 5.
|
@ -2,7 +2,7 @@
|
||||
cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module must be ERA5.","Specifies the directory where the relevant weather data ist stored."
|
||||
resource,,,
|
||||
-- method,--,"Must be 'wind'","A superordinate technology type."
|
||||
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`_","Specifies the turbine type and its characteristic power curve."
|
||||
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`__","Specifies the turbine type and its characteristic power curve."
|
||||
capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine placement."
|
||||
corine,--,"Any *realistic* subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for AC-connected offshore wind turbine placement."
|
||||
natura,bool,"{true, false}","Switch to exclude `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas. Area is excluded if ``true``."
|
||||
|
|
@ -2,7 +2,7 @@
|
||||
cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module must be ERA5.","Specifies the directory where the relevant weather data ist stored."
|
||||
resource,,,
|
||||
-- method,--,"Must be 'wind'","A superordinate technology type."
|
||||
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`_","Specifies the turbine type and its characteristic power curve."
|
||||
-- turbine,--,"One of turbine types included in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/windturbine>`__","Specifies the turbine type and its characteristic power curve."
|
||||
capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of wind turbine placement."
|
||||
corine,,,
|
||||
-- grid_codes,--,"Any subset of the `CORINE Land Cover code list <http://www.eea.europa.eu/data-and-maps/data/corine-land-cover-2006-raster-1/corine-land-cover-classes-and/clc_legend.csv/at_download/file>`_","Specifies areas according to CORINE Land Cover codes which are generally eligible for wind turbine placement."
|
||||
|
|
@ -1,11 +1,11 @@
|
||||
Trigger, Description, Definition, Status
|
||||
``nH``; i.e. ``2H``-``6H``, Resample the time-resolution by averaging over every ``n`` snapshots, ``prepare_network``: `average_every_nhours() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L110>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L146>`_), In active use
|
||||
``nH``; i.e. ``2H``-``6H``, Resample the time-resolution by averaging over every ``n`` snapshots, ``prepare_network``: `average_every_nhours() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L110>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L146>`__, In active use
|
||||
``nSEG``; e.g. ``4380SEG``, "Apply time series segmentation with `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package to ``n`` adjacent snapshots of varying lengths based on capacity factors of varying renewables, hydro inflow and load.", ``prepare_network``: apply_time_segmentation(), In active use
|
||||
``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L19>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L154>`_, In active use
|
||||
``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L24>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L158>`_, In active use
|
||||
``Co2L``, Add an overall absolute carbon-dioxide emissions limit configured in ``electricity: co2limit``. If a float is appended an overall emission limit relative to the emission level given in ``electricity: co2base`` is added (e.g. ``Co2L0.05`` limits emissions to 5% of what is given in ``electricity: co2base``), ``prepare_network``: `add_co2limit() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L19>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L154>`__, In active use
|
||||
``Ep``, Add cost for a carbon-dioxide price configured in ``costs: emission_prices: co2`` to ``marginal_cost`` of generators (other emission types listed in ``network.carriers`` possible as well), ``prepare_network``: `add_emission_prices() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L24>`_ and its `caller <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/prepare_network.py#L158>`__, In active use
|
||||
``CCL``, Add minimum and maximum levels of generator nominal capacity per carrier for individual countries. These can be specified in the file linked at ``electricity: agg_p_nom_limits`` in the configuration. File defaults to ``data/agg_p_nom_minmax.csv``., ``solve_network``, In active use
|
||||
``EQ``, "Require each country or node to on average produce a minimal share of its total consumption itself. Example: ``EQ0.5c`` demands each country to produce on average at least 50% of its consumption; ``EQ0.5`` demands each node to produce on average at least 50% of its consumption.", ``solve_network``, In active use
|
||||
``ATK``, "Require each node to be autarkic. Example: ``ATK`` removes all lines and links. ``ATKc`` removes all cross-border lines and links.", ``prepare_network``, In active use
|
||||
``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L66>`_, Untested
|
||||
``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L73>`_, Untested
|
||||
``BAU``, Add a per-``carrier`` minimal overall capacity; i.e. at least ``40GW`` of ``OCGT`` in Europe; configured in ``electricity: BAU_mincapacities``, ``solve_network``: `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L66>`__, Untested
|
||||
``SAFE``, Add a capacity reserve margin of a certain fraction above the peak demand to which renewable generators and storage do *not* contribute. Ignores network., ``solve_network`` `add_opts_constraints() <https://github.com/PyPSA/pypsa-eur/blob/6b964540ed39d44079cdabddee8333f486d0cd63/scripts/solve_network.py#L73>`__, Untested
|
||||
``carrier+{c|p}factor``, "Alter the capital cost (``c``) or installable potential (``p``) of a carrier by a factor. Example: ``solar+c0.5`` reduces the capital cost of solar to 50\% of original values.", ``prepare_network``, In active use
|
||||
|
|
@ -2,7 +2,7 @@
|
||||
cutout,--,"Should be a folder listed in the configuration ``atlite: cutouts:`` (e.g. 'europe-2013-era5') or reference an existing folder in the directory ``cutouts``. Source module can be ERA5 or SARAH-2.","Specifies the directory where the relevant weather data is stored that is specified at ``atlite/cutouts`` configuration. Both ``sarah`` and ``era5`` work."
|
||||
resource,,,
|
||||
-- method,--,"Must be 'pv'","A superordinate technology type."
|
||||
-- panel,--,"One of {'Csi', 'CdTe', 'KANENA'} as defined in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/solarpanel>`_","Specifies the solar panel technology and its characteristic attributes."
|
||||
-- panel,--,"One of {'Csi', 'CdTe', 'KANENA'} as defined in `atlite <https://github.com/PyPSA/atlite/tree/master/atlite/resources/solarpanel>`__","Specifies the solar panel technology and its characteristic attributes."
|
||||
-- orientation,,,
|
||||
-- -- slope,°,"Realistically any angle in [0., 90.]","Specifies the tilt angle (or slope) of the solar panel. A slope of zero corresponds to the face of the panel aiming directly overhead. A positive tilt angle steers the panel towards the equator."
|
||||
-- -- azimuth,°,"Any angle in [0., 360.]","Specifies the `azimuth <https://en.wikipedia.org/wiki/Azimuth>`_ orientation of the solar panel. South corresponds to 180.°."
|
||||
|
|
@ -1,3 +1,3 @@
|
||||
,Unit,Values,Description
|
||||
name,--,"One of {'gurobi', 'cplex', 'cbc', 'glpk', 'ipopt'}; potentially more possible","Solver to use for optimisation problems in the workflow; e.g. clustering and linear optimal power flow."
|
||||
opts,--,"Parameter list for `Gurobi <https://www.gurobi.com/documentation/8.1/refman/parameters.html>`_ and `CPLEX <https://www.ibm.com/support/knowledgecenter/SSSA5P_12.5.1/ilog.odms.cplex.help/CPLEX/Parameters/topics/introListAlpha.html>`_","Solver specific parameter settings."
|
||||
opts,--,"Parameter list for `Gurobi <https://www.gurobi.com/documentation/8.1/refman/parameters.html>`_ and `CPLEX <https://www.ibm.com/docs/en/icos/20.1.0?topic=cplex-topical-list-parameters>`_","Solver specific parameter settings."
|
||||
|
|
@ -6,12 +6,10 @@ logging,,,
|
||||
-- format,--,"","Custom format for log messages. See `LogRecord <https://docs.python.org/3/library/logging.html#logging.LogRecord>`_ attributes."
|
||||
summary_dir,--,"e.g. 'results'","Directory into which results are written."
|
||||
countries,--,"Subset of {'AL', 'AT', 'BA', 'BE', 'BG', 'CH', 'CZ', 'DE', 'DK', 'EE', 'ES', 'FI', 'FR', 'GB', 'GR', 'HR', 'HU', 'IE', 'IT', 'LT', 'LU', 'LV', 'ME', 'MK', 'NL', 'NO', 'PL', 'PT', 'RO', 'RS', 'SE', 'SI', 'SK'}","European countries defined by their `Two-letter country codes (ISO 3166-1) <https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2>`_ which should be included in the energy system model."
|
||||
focus_weights,--,"Keys should be two-digit country codes (e.g. DE) and values should range between 0 and 1","Ratio of total clusters for particular countries. The remaining weight is distributed according to mean load. An example: ``focus_weights: DE: 0.6 FR: 0.2``."
|
||||
focus_weights,--,"Keys should be two-digit country codes (e.g. DE) and values should range between 0 and 1","Ratio of total clusters for particular countries. The remaining weight is distributed according to mean load. An example: ``focus_weights: 'DE': 0.6 'FR': 0.2``."
|
||||
enable,,,
|
||||
-- prepare_links_p_nom,bool,"{true, false}","Switch to retrieve current HVDC projects from `Wikipedia <https://en.wikipedia.org/wiki/List_of_HVDC_projects>`_"
|
||||
-- retrieve_databundle,bool,"{true, false}","Switch to retrieve databundle from zenodo via the rule :mod:`retrieve_databundle` or whether to keep a custom databundle located in the corresponding folder."
|
||||
-- build_cutout,bool,"{true, false}","Switch to enable the building of cutouts via the rule :mod:`build_cutout`."
|
||||
-- retrieve_cutout,bool,"{true, false}","Switch to enable the retrieval of cutouts from zenodo with :mod:`retrieve_cutout`."
|
||||
-- build_natura_raster,bool,"{true, false}","Switch to enable the creation of the raster ``natura.tiff`` via the rule :mod:`build_natura_raster`."
|
||||
-- retrieve_natura_raster,bool,"{true, false}","Switch to enable the retrieval of ``natura.tiff`` from zenodo with :mod:`retrieve_natura_raster`."
|
||||
-- custom_busmap,bool,"{true, false}","Switch to enable the use of custom busmaps in rule :mod:`cluster_network`. If activated the rule looks for provided busmaps at ``data/custom_busmap_elec_s{simpl}_{clusters}.csv`` which should have the same format as ``resources/busmap_elec_s{simpl}_{clusters}.csv``, i.e. the index should contain the buses of ``networks/elec_s{simpl}.nc``."
|
||||
|
|
@ -18,7 +18,8 @@ Top-level configuration
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 5-12,20,27-34
|
||||
:lines: 5-12,20,31-38
|
||||
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -50,7 +51,8 @@ An exemplary dependency graph (starting from the simplification rules) then look
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 14-18
|
||||
:start-at: scenario:
|
||||
:end-before: countries:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -66,7 +68,8 @@ Specifies the temporal range to build an energy system model for as arguments to
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 22-25
|
||||
:start-at: snapshots:
|
||||
:end-before: enable:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -80,7 +83,8 @@ Specifies the temporal range to build an energy system model for as arguments to
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 36-60
|
||||
:start-at: electricity:
|
||||
:end-before: atlite:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -93,7 +97,7 @@ Specifies the temporal range to build an energy system model for as arguments to
|
||||
.. _atlite_cf:
|
||||
|
||||
``atlite``
|
||||
=============
|
||||
==========
|
||||
|
||||
Define and specify the ``atlite.Cutout`` used for calculating renewable potentials and time-series. All options except for ``features`` are directly used as `cutout parameters <https://atlite.readthedocs.io/en/latest/ref_api.html#cutout>`_.
|
||||
|
||||
@ -117,7 +121,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 77-94
|
||||
:start-at: renewable:
|
||||
:end-before: offwind-ac:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -129,7 +134,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 77,95-107
|
||||
:start-at: offwind-ac:
|
||||
:end-before: offwind-dc:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -141,7 +147,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 77,108-121
|
||||
:start-at: offwind-dc:
|
||||
:end-before: solar:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -153,7 +160,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 77,122-141
|
||||
:start-at: solar:
|
||||
:end-before: hydro:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -165,7 +173,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 77,142-147
|
||||
:start-at: hydro:
|
||||
:end-before: lines:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -179,7 +188,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 149-157
|
||||
:start-at: lines:
|
||||
:end-before: links:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -193,7 +203,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 159-163
|
||||
:start-at: links:
|
||||
:end-before: transformers:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -207,7 +218,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 165-168
|
||||
:start-at: transformers:
|
||||
:end-before: load:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -221,7 +233,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 170-176
|
||||
:start-at: load:
|
||||
:end-before: costs:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -235,7 +248,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 178-190
|
||||
:start-after: scaling_factor:
|
||||
:end-before: solving:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -256,7 +270,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 192-202
|
||||
:start-at: solving:
|
||||
:end-before: solver:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -268,7 +283,8 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 192,203-219
|
||||
:start-at: solver:
|
||||
:end-before: plotting:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
@ -282,7 +298,7 @@ Define and specify the ``atlite.Cutout`` used for calculating renewable potentia
|
||||
|
||||
.. literalinclude:: ../config.default.yaml
|
||||
:language: yaml
|
||||
:lines: 221-299
|
||||
:start-at: plotting:
|
||||
|
||||
.. csv-table::
|
||||
:header-rows: 1
|
||||
|
BIN
doc/img/base.png
BIN
doc/img/base.png
Binary file not shown.
Before Width: | Height: | Size: 1.7 MiB After Width: | Height: | Size: 1.6 MiB |
BIN
doc/img/synchronisation.png
Normal file
BIN
doc/img/synchronisation.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 648 KiB |
@ -9,8 +9,8 @@ PyPSA-Eur: An Open Optimisation Model of the European Transmission System
|
||||
.. image:: https://img.shields.io/github/v/release/pypsa/pypsa-eur?include_prereleases
|
||||
:alt: GitHub release (latest by date including pre-releases)
|
||||
|
||||
.. image:: https://travis-ci.org/PyPSA/pypsa-eur.svg?branch=master
|
||||
:target: https://travis-ci.org/PyPSA/pypsa-eur
|
||||
.. image:: https://github.com/pypsa/pypsa-eur/actions/workflows/ci.yaml/badge.svg
|
||||
:target: https://github.com/PyPSA/pypsa-eur/actions
|
||||
|
||||
.. image:: https://readthedocs.org/projects/pypsa-eur/badge/?version=latest
|
||||
:target: https://pypsa-eur.readthedocs.io/en/latest/?badge=latest
|
||||
@ -49,7 +49,18 @@ The restriction to freely available and open data encourages the open exchange o
|
||||
|
||||
PyPSA-Eur is designed to be imported into the open toolbox `PyPSA <https://www.pypsa.org>`_ for which `documentation <https://pypsa.org/doc>`_ is available as well.
|
||||
|
||||
This project is maintained by the `Energy System Modelling group <https://www.iai.kit.edu/english/2338.php>`_ at the `Institute for Automation and Applied Informatics <https://www.iai.kit.edu/english/index.php>`_ at the `Karlsruhe Institute of Technology <http://www.kit.edu/english/index.php>`_. The group is funded by the `Helmholtz Association <https://www.helmholtz.de/en/>`_ until 2024. Previous versions were developed by the `Renewable Energy Group <https://fias.uni-frankfurt.de/physics/schramm/renewable-energy-system-and-network-analysis/>`_ at `FIAS <https://fias.uni-frankfurt.de/>`_ to carry out simulations for the `CoNDyNet project <http://condynet.de/>`_, financed by the `German Federal Ministry for Education and Research (BMBF) <https://www.bmbf.de/en/index.html>`_ as part of the `Stromnetze Research Initiative <http://forschung-stromnetze.info/projekte/grundlagen-und-konzepte-fuer-effiziente-dezentrale-stromnetze/>`_.
|
||||
This project is currently maintained by the `Department of Digital
|
||||
Transformation in Energy Systems <https://www.ensys.tu-berlin.de>`_ at the
|
||||
`Technische Universität Berlin <https://www.tu.berlin>`_. Previous versions were
|
||||
developed within the `IAI <http://www.iai.kit.edu>`_ at the `Karlsruhe Institute of
|
||||
Technology (KIT) <http://www.kit.edu/english/index.php>`_ and by the `Renewable
|
||||
Energy Group
|
||||
<https://fias.uni-frankfurt.de/physics/schramm/renewable-energy-system-and-network-analysis/>`_
|
||||
at `FIAS <https://fias.uni-frankfurt.de/>`_ to carry out simulations for the
|
||||
`CoNDyNet project <http://condynet.de/>`_, financed by the `German Federal
|
||||
Ministry for Education and Research (BMBF) <https://www.bmbf.de/en/index.html>`_
|
||||
as part of the `Stromnetze Research Initiative
|
||||
<http://forschung-stromnetze.info/projekte/grundlagen-und-konzepte-fuer-effiziente-dezentrale-stromnetze/>`_.
|
||||
|
||||
A version of the model that adds building heating, transport and industry sectors to the model,
|
||||
as well as gas networks, is currently being developed in the `PyPSA-Eur-Sec repository <https://github.com/pypsa/pypsa-eur-sec>`_.
|
||||
@ -141,7 +152,7 @@ If you are (relatively) new to energy system modelling and optimisation
|
||||
and plan to use PyPSA-Eur, the following resources are *one way* to get started
|
||||
in addition to reading this documentation.
|
||||
|
||||
- Documentation of `PyPSA <https://pypsa.readthedocs.io>`_, the package for
|
||||
- Documentation of `PyPSA <https://pypsa.readthedocs.io>`__, the package for
|
||||
simulating and optimising modern power systems which PyPSA-Eur uses under the hood.
|
||||
- Course on `Energy System Modelling <https://nworbmot.org/courses/esm-2019/>`_,
|
||||
Karlsruhe Institute of Technology (KIT), `Dr. Tom Brown <https://nworbmot.org>`_
|
||||
@ -196,7 +207,7 @@ Licence
|
||||
|
||||
PyPSA-Eur work is released under multiple licenses:
|
||||
|
||||
* All original source code is licensed as free software under `GPL-3.0-or-later <LICENSES/GPL-3.0-or-later.txt>`_.
|
||||
* All original source code is licensed as free software under `MIT <LICENSES/MIT.txt>`_.
|
||||
* The documentation is licensed under `CC-BY-4.0 <LICENSES/CC-BY-4.0.txt>`_.
|
||||
* Configuration files are mostly licensed under `CC0-1.0 <LICENSES/CC0-1.0.txt>`_.
|
||||
* Data files are licensed under `CC-BY-4.0 <LICENSES/CC-BY-4.0.txt>`_.
|
||||
|
@ -1,5 +1,5 @@
|
||||
REM SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
REM SPDX-License-Identifier: GPL-3.0-or-later
|
||||
REM SPDX-License-Identifier: MIT
|
||||
|
||||
@ECHO OFF
|
||||
|
||||
|
@ -84,8 +84,8 @@ Rule ``make_summary``
|
||||
Rule ``plot_summary``
|
||||
========================
|
||||
|
||||
.. graphviz::
|
||||
:align: center
|
||||
.. .. graphviz::
|
||||
.. :align: center
|
||||
|
||||
|
||||
|
||||
|
@ -27,7 +27,6 @@ With these and the externally extracted ENTSO-E online map topology
|
||||
Then the process continues by calculating conventional power plant capacities, potentials, and per-unit availability time series for variable renewable energy carriers and hydro power plants with the following rules:
|
||||
|
||||
- :mod:`build_powerplants` for today's thermal power plant capacities using `powerplantmatching <https://github.com/FRESNA/powerplantmatching>`_ allocating these to the closest substation for each powerplant,
|
||||
- :mod:`build_natura_raster` for rasterising NATURA2000 natural protection areas,
|
||||
- :mod:`build_renewable_profiles` for the hourly capacity factors and installation potentials constrained by land-use in each substation's Voronoi cell for PV, onshore and offshore wind, and
|
||||
- :mod:`build_hydro_profile` for the hourly per-unit hydro power availability time series.
|
||||
|
||||
@ -41,11 +40,9 @@ together into a detailed PyPSA network stored in ``networks/elec.nc``.
|
||||
preparation/build_shapes
|
||||
preparation/build_load_data
|
||||
preparation/build_cutout
|
||||
preparation/build_natura_raster
|
||||
preparation/prepare_links_p_nom
|
||||
preparation/base_network
|
||||
preparation/build_bus_regions
|
||||
preparation/build_natura_raster
|
||||
preparation/build_powerplants
|
||||
preparation/build_renewable_profiles
|
||||
preparation/build_hydro_profile
|
||||
|
@ -1,39 +0,0 @@
|
||||
..
|
||||
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
.. _natura:
|
||||
|
||||
Rule ``build_natura_raster``
|
||||
===============================
|
||||
|
||||
.. graphviz::
|
||||
:align: center
|
||||
|
||||
digraph snakemake_dag {
|
||||
graph [bgcolor=white,
|
||||
margin=0,
|
||||
size="8,5"
|
||||
];
|
||||
node [fontname=sans,
|
||||
fontsize=10,
|
||||
penwidth=2,
|
||||
shape=box,
|
||||
style=rounded
|
||||
];
|
||||
edge [color=grey,
|
||||
penwidth=2
|
||||
];
|
||||
9 [color="0.22 0.6 0.85",
|
||||
label=build_renewable_profiles];
|
||||
12 [color="0.31 0.6 0.85",
|
||||
fillcolor=gray,
|
||||
label=build_natura_raster,
|
||||
style=filled];
|
||||
12 -> 9;
|
||||
}
|
||||
|
||||
|
|
||||
|
||||
.. automodule:: build_natura_raster
|
@ -41,9 +41,6 @@ Rule ``build_renewable_profiles``
|
||||
8 [color="0.00 0.6 0.85",
|
||||
label=build_shapes];
|
||||
8 -> 9;
|
||||
12 [color="0.31 0.6 0.85",
|
||||
label=build_natura_raster];
|
||||
12 -> 9;
|
||||
13 [color="0.56 0.6 0.85",
|
||||
label=build_cutout];
|
||||
13 -> 9;
|
||||
|
@ -25,7 +25,7 @@ Rule ``retrieve_cutout``
|
||||
:target: https://doi.org/10.5281/zenodo.3517949
|
||||
|
||||
Cutouts are spatio-temporal subsets of the European weather data from the `ECMWF ERA5 <https://software.ecmwf.int/wiki/display/CKB/ERA5+data+documentation>`_ reanalysis dataset and the `CMSAF SARAH-2 <https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=SARAH_V002>`_ solar surface radiation dataset for the year 2013.
|
||||
They have been prepared by and are for use with the `atlite <https://github.com/PyPSA/atlite>`_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo <https://doi.org/10.5281/zenodo.3517949>`_ through the rule ``retrieve_cutout``.
|
||||
They have been prepared by and are for use with the `atlite <https://github.com/PyPSA/atlite>`_ tool. You can either generate them yourself using the ``build_cutouts`` rule or retrieve them directly from `zenodo <https://doi.org/10.5281/zenodo.3517949>`__ through the rule ``retrieve_cutout``.
|
||||
The :ref:`tutorial` uses a smaller cutout than required for the full model (30 MB), which is also automatically downloaded.
|
||||
|
||||
.. note::
|
||||
@ -50,30 +50,3 @@ The :ref:`tutorial` uses a smaller cutout than required for the full model (30 M
|
||||
|
||||
.. seealso::
|
||||
For details see :mod:`build_cutout` and read the `atlite documentation <https://atlite.readthedocs.io>`_.
|
||||
|
||||
|
||||
Rule ``retrieve_natura_raster``
|
||||
-------------------------------
|
||||
|
||||
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.4706686.svg
|
||||
:target: https://doi.org/10.5281/zenodo.4706686
|
||||
|
||||
This rule, as a substitute for :mod:`build_natura_raster`, downloads an already rasterized version (`natura.tiff <https://zenodo.org/record/4706686/files/natura.tiff>`_) of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times. The file is placed into the ``resources`` sub-directory.
|
||||
|
||||
**Relevant Settings**
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
enable:
|
||||
build_natura_raster:
|
||||
|
||||
.. seealso::
|
||||
Documentation of the configuration file ``config.yaml`` at
|
||||
:ref:`toplevel_cf`
|
||||
|
||||
**Outputs**
|
||||
|
||||
- ``resources/natura.tiff``: Rasterized version of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times.
|
||||
|
||||
.. seealso::
|
||||
For details see :mod:`build_natura_raster`.
|
||||
|
@ -1,5 +1,5 @@
|
||||
..
|
||||
SPDX-FileCopyrightText: 2019-2020 The PyPSA-Eur Authors
|
||||
SPDX-FileCopyrightText: 2019-2021 The PyPSA-Eur Authors
|
||||
|
||||
SPDX-License-Identifier: CC-BY-4.0
|
||||
|
||||
@ -7,26 +7,201 @@
|
||||
Release Notes
|
||||
##########################################
|
||||
|
||||
Synchronisation Release - Ukraine and Moldova (17th March 2022)
|
||||
===============================================================
|
||||
|
||||
On March 16, 2022, the transmission networks of Ukraine and Moldova have
|
||||
successfully been `synchronised with the continental European grid <https://www.entsoe.eu/news/2022/03/16/continental-europe-successful-synchronisation-with-ukraine-and-moldova-power-systems/>`_. We have taken
|
||||
this as an opportunity to add the power systems of Ukraine and Moldova to
|
||||
PyPSA-Eur. This includes:
|
||||
|
||||
.. image:: img/synchronisation.png
|
||||
:width: 500
|
||||
|
||||
* the transmission network topology from the `ENTSO-E interactive map <https://www.entsoe.eu/data/map/>`_.
|
||||
|
||||
* existing power plants (incl. nuclear, coal, gas and hydro) from the `powerplantmatching <https://github.com/fresna/powerplantmatching>`_ tool
|
||||
|
||||
* country-level load time series from ENTSO-E through the `OPSD platform <https://data.open-power-system-data.org/time_series/2020-10-06>`_, which are then distributed heuristically to substations by GDP and population density.
|
||||
|
||||
* wind and solar profiles based on ERA5 and SARAH-2 weather data
|
||||
|
||||
* hydro profiles based on historical `EIA generation data <https://www.eia.gov/international/data/world>`_
|
||||
|
||||
* a simplified calculation of wind and solar potentials based on the `Copernicus Land Cover dataset <https://land.copernicus.eu/global/products/lc>`_.
|
||||
|
||||
* electrical characteristics of 750 kV transmission lines
|
||||
|
||||
The Crimean power system is currently disconnected from the main Ukrainian grid and, hence, not included.
|
||||
|
||||
This release is not on the ``master`` branch. It can be used with
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git clone https://github.com/pypsa/pypsa-eur
|
||||
git checkout synchronisation-release
|
||||
|
||||
|
||||
Upcoming Release
|
||||
================
|
||||
|
||||
* Switch to new major release, ``>=v0.2.1`` of ``atlite``. The version upgrade comes along with significant speed up for the rule ``build_renewable_profiles.py`` (~factor 2). A lot of the code which calculated the landuse availability is now outsourced and does not rely on ``glaes``, ``geokit`` anymore. This facilitates the environment building and version compatibility of ``gdal``, ``libgdal`` with other packages.
|
||||
* The minimum python version was set to ``3.8``.
|
||||
* The rule and script ``build_country_flh`` are removed as they're no longer used or maintained.
|
||||
* The flag ``keep_all_available_areas`` in the configuration for renewable potentials (config.yaml -> renewable -> {technology}) was deprecated and now defaults to ``True``.
|
||||
* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to ``cutouts/europe-2013-era5-tutorial.nc`` to accommodate tutorial and productive cutouts side-by-side.
|
||||
* Fix: Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq (from 3.1e9 t CO2-eq). The new value represents emissions related to the electricity sector for EU+UK. The old value was ~2x too high and used when the emissions wildcard in ``{opts}`` was used.
|
||||
* Add option to include marginal costs of links representing fuel cells, electrolysis, and battery inverters
|
||||
[`#232 <https://github.com/PyPSA/pypsa-eur/pull/232>`_].
|
||||
* Add an efficiency factor of 88.55% to offshore wind capacity factors
|
||||
as a proxy for wake losses. More rigorous modelling is `planned <https://github.com/PyPSA/pypsa-eur/issues/153>`_
|
||||
[`#277 <https://github.com/PyPSA/pypsa-eur/pull/277>`_].
|
||||
|
||||
* The default deployment density of AC- and DC-connected offshore wind capacity is reduced from 3 MW/sqkm
|
||||
to a more conservative estimate of 2 MW/sqkm [`#280 <https://github.com/PyPSA/pypsa-eur/pull/280>`_].
|
||||
|
||||
* Following discussion in `#285 <https://github.com/PyPSA/pypsa-eur/issues/285>`_ we have disabled the
|
||||
correction factor for solar PV capacity factors by default while satellite data is used.
|
||||
A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used.
|
||||
|
||||
* Resource definitions for memory usage now follow the `Snakemake standard resource definition <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources>`_ ``mem_mb`` rather than ``mem``.
|
||||
|
||||
* Network building is made deterministic by supplying a fixed random state to network clustering routines.
|
||||
|
||||
* New network topology extracted from the ENTSO-E interactive map.
|
||||
|
||||
* Remove rules to build or retrieve rasterized NATURA 2000 dataset. Renewable potential calculation now directly uses the shapefiles.
|
||||
|
||||
* Cache data and cutouts folders. This cache will be updated weekly.
|
||||
|
||||
* Add rule to automatically retrieve Natura2000 natural protection areas. Switch of file format to GPKG.
|
||||
* The unused argument ``simple_hvdc_costs`` in :mod:`add_electricity` was removed.
|
||||
|
||||
* Iterative solving with impedance updates is skipped if there are no expandable lines.
|
||||
|
||||
* Switch from Germany to Belgium for continuous integration and tutorial to save resources.
|
||||
|
||||
* Use updated SARAH-2 and ERA5 cutouts with slightly wider scope to east and additional variables.
|
||||
|
||||
* Fix crs bug. Change crs 4236 to 4326.
|
||||
|
||||
* Update rasterio version to correctly calculate exclusion raster
|
||||
|
||||
* Techno-economic parameters of technologies (e.g. costs and efficiencies) will now be retrieved from a separate repository `PyPSA/technology-data <https://github.com/pypsa/technology-data>`_
|
||||
that collects assumptions from a variety of sources. It is activated by default with ``enable: retrieve_cost_data: true`` and controlled with ``costs: year:`` and ``costs: version:``.
|
||||
The location of this data changed from ``data/costs.csv`` to ``resources/costs.csv``
|
||||
[`#184 <https://github.com/PyPSA/pypsa-eur/pull/184>`_].
|
||||
|
||||
|
||||
PyPSA-Eur 0.4.0 (22nd September 2021)
|
||||
=====================================
|
||||
|
||||
**New Features and Changes**
|
||||
|
||||
* With this release, we change the license from copyleft GPLv3 to the more
|
||||
liberal MIT license with the consent of all contributors
|
||||
[`#276 <https://github.com/PyPSA/pypsa-eur/pull/276>`_].
|
||||
|
||||
* Switch to the new major ``atlite`` release v0.2. The version upgrade comes
|
||||
along with significant speed up for the rule ``build_renewable_profiles.py``
|
||||
(~factor 2). A lot of the code which calculated the land-use availability is now
|
||||
outsourced and does not rely on ``glaes``, ``geokit`` anymore. This facilitates
|
||||
the environment building and version compatibility of ``gdal``, ``libgdal`` with
|
||||
other packages [`#224 <https://github.com/PyPSA/pypsa-eur/pull/224>`_].
|
||||
|
||||
* Implemented changes to ``n.snapshot_weightings`` in new PyPSA version v0.18
|
||||
(cf. `PyPSA/PyPSA/#227 <https://github.com/PyPSA/PyPSA/pull/227>`_)
|
||||
[`#259 <https://github.com/PyPSA/pypsa-eur/pull/259>`_].
|
||||
|
||||
* Add option to pre-aggregate nodes without power injections (positive or
|
||||
negative, i.e. generation or demand) to electrically closest nodes or neighbors
|
||||
in ``simplify_network``. Defaults to ``False``. This affects nodes that are no
|
||||
substations or have no offshore connection.
|
||||
|
||||
* In :mod:`simplify_network`, bus columns with no longer correct entries are
|
||||
removed (symbol, tags, under_construction, substation_lv, substation_off)
|
||||
[`#219 <https://github.com/PyPSA/pypsa-eur/pull/219>`_]
|
||||
|
||||
* Add option to include marginal costs of links representing fuel cells,
|
||||
electrolysis, and battery inverters
|
||||
[`#232 <https://github.com/PyPSA/pypsa-eur/pull/232>`_].
|
||||
|
||||
* The rule and script ``build_country_flh`` are removed as they are no longer
|
||||
used or maintained.
|
||||
|
||||
* The connection cost of generators in :mod:`simplify_network` are now reported
|
||||
in ``resources/connection_costs_s{simpl}.csv``
|
||||
[`#261 <https://github.com/PyPSA/pypsa-eur/pull/261>`_].
|
||||
|
||||
* The tutorial cutout was renamed from ``cutouts/europe-2013-era5.nc`` to
|
||||
  ``cutouts/be-03-2013-era5.nc`` to accommodate tutorial and productive
|
||||
cutouts side-by-side.
|
||||
|
||||
* The flag ``keep_all_available_areas`` in the configuration for renewable
|
||||
potentials was deprecated and now defaults to ``True``.
|
||||
|
||||
* Update dependencies in ``envs/environment.yaml``
|
||||
[`#257 <https://github.com/PyPSA/pypsa-eur/pull/257>`_]
|
||||
|
||||
* Continuous integration testing switches to Github Actions from Travis CI
|
||||
[`#252 <https://github.com/PyPSA/pypsa-eur/pull/252>`_].
|
||||
|
||||
* Documentation on readthedocs.io is now built with ``pip`` only and no longer
|
||||
requires ``conda`` [`#267 <https://github.com/PyPSA/pypsa-eur/pull/267>`_].
|
||||
|
||||
* Use ``Citation.cff`` [`#273 <https://github.com/PyPSA/pypsa-eur/pull/273>`_].
|
||||
|
||||
**Bugs and Compatibility**
|
||||
|
||||
|
||||
* Support for PyPSA v0.18 [`#268 <https://github.com/PyPSA/pypsa-eur/pull/268>`_].
|
||||
|
||||
* Minimum Python version set to ``3.8``.
|
||||
|
||||
* Removed ``six`` dependency [`#245 <https://github.com/PyPSA/pypsa-eur/pull/245>`_].
|
||||
|
||||
* Update :mod:`plot_network` and :mod:`make_summary` rules to latest PyPSA
|
||||
versions [`#270 <https://github.com/PyPSA/pypsa-eur/pull/270>`_].
|
||||
|
||||
* Keep converter links to store components when using the ``ATK``
|
||||
wildcard and only remove DC links [`#214 <https://github.com/PyPSA/pypsa-eur/pull/214>`_].
|
||||
|
||||
* Value for ``co2base`` in ``config.yaml`` adjusted to 1.487e9 t CO2-eq
|
||||
(from 3.1e9 t CO2-eq). The new value represents emissions related to the
|
||||
electricity sector for EU+UK+Balkan. The old value was too high and used when
|
||||
the emissions wildcard in ``{opts}`` was used
|
||||
[`#233 <https://github.com/PyPSA/pypsa-eur/pull/233>`_].
|
||||
|
||||
* Add escape in :mod:`base_network` if all TYNDP links are already
|
||||
contained in the network
|
||||
[`#246 <https://github.com/PyPSA/pypsa-eur/pull/246>`_].
|
||||
|
||||
* In :mod:`solve_operations_network` the optimised capacities are now
|
||||
fixed for all extendable links, not only HVDC links
|
||||
[`#244 <https://github.com/PyPSA/pypsa-eur/pull/244>`_].
|
||||
|
||||
* The ``focus_weights`` are now also considered when pre-clustering in
|
||||
the :mod:`simplify_network` rule
|
||||
[`#241 <https://github.com/PyPSA/pypsa-eur/pull/241>`_].
|
||||
|
||||
* Fix bug in :mod:`build_renewable_profile` where offshore wind profiles could
|
||||
no longer be created [`#249 <https://github.com/PyPSA/pypsa-eur/pull/249>`_].
|
||||
|
||||
* Lower expansion limit of extendable carriers is now set to the
|
||||
existing capacity, i.e. ``p_nom_min = p_nom`` (0 before). Simultaneously, the
|
||||
upper limit (``p_nom_max``) is now the maximum of the installed capacity
|
||||
(``p_nom``) and the previous estimate based on land availability (``p_nom_max``)
|
||||
[`#260 <https://github.com/PyPSA/pypsa-eur/pull/260>`_].
|
||||
|
||||
* Solving an operations network now includes optimized store capacities
|
||||
as well. Before only lines, links, generators and storage units were considered
|
||||
[`#269 <https://github.com/PyPSA/pypsa-eur/pull/269>`_].
|
||||
|
||||
* With ``load_shedding: true`` in the solving options of ``config.yaml``
|
||||
load shedding generators are only added at the AC buses, excluding buses for H2
|
||||
and battery stores [`#269 <https://github.com/PyPSA/pypsa-eur/pull/269>`_].
|
||||
|
||||
* Delete duplicated capital costs at battery discharge link
|
||||
[`#240 <https://github.com/PyPSA/pypsa-eur/pull/240>`_].
|
||||
|
||||
* Propagate the solver log file name to the solver. Previously, the
|
||||
PyPSA network solving functions were not told about the solver logfile specified
|
||||
in the Snakemake file [`#247 <https://github.com/PyPSA/pypsa-eur/pull/247>`_]
|
||||
|
||||
PyPSA-Eur 0.3.0 (7th December 2020)
|
||||
==================================
|
||||
===================================
|
||||
|
||||
**New Features**
|
||||
|
||||
@ -49,6 +224,7 @@ Using the ``{opts}`` wildcard for scenarios:
|
||||
uses the `tsam <https://tsam.readthedocs.io/en/latest/index.html>`_ package
|
||||
[`#186 <https://github.com/PyPSA/pypsa-eur/pull/186>`_].
|
||||
|
||||
|
||||
More OPSD integration:
|
||||
|
||||
* Add renewable power plants from `OPSD <https://data.open-power-system-data.org/renewable_power_plants/2020-08-25>`_ to the network for specified technologies.
|
||||
@ -206,7 +382,7 @@ Release Process
|
||||
|
||||
* Tag a release on Github via ``git tag v0.x.x``, ``git push``, ``git push --tags``. Include release notes in the tag message.
|
||||
|
||||
* Upload code to `zenodo code repository <https://doi.org/10.5281/zenodo.3520874>`_ with `GNU GPL 3.0 <https://www.gnu.org/licenses/gpl-3.0.en.html>`_ license.
|
||||
* Upload code to `zenodo code repository <https://doi.org/10.5281/zenodo.3520874>`_ with `MIT license <https://opensource.org/licenses/MIT>`_.
|
||||
|
||||
* Create pre-built networks for ``config.default.yaml`` by running ``snakemake -j 1 extra_components_all_networks``.
|
||||
|
||||
|
21
doc/requirements.txt
Normal file
21
doc/requirements.txt
Normal file
@ -0,0 +1,21 @@
|
||||
# SPDX-FileCopyrightText: : 2019-2021 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
sphinx
|
||||
sphinx_rtd_theme
|
||||
|
||||
pypsa
|
||||
vresutils>=0.3.1
|
||||
powerplantmatching>=0.4.8
|
||||
atlite>=0.2.2
|
||||
dask<=2021.3.1
|
||||
|
||||
# cartopy
|
||||
scikit-learn
|
||||
pycountry
|
||||
pyyaml
|
||||
seaborn
|
||||
memory_profiler
|
||||
tables
|
||||
descartes
|
@ -14,7 +14,7 @@ Tutorial
|
||||
<iframe width="832" height="468" src="https://www.youtube.com/embed/mAwhQnNRIvs" frameborder="0" allow="accelerometer; autoplay; encrypted-media; gyroscope; picture-in-picture" allowfullscreen></iframe>
|
||||
|
||||
Before getting started with **PyPSA-Eur** it makes sense to be familiar
|
||||
with its general modelling framework `PyPSA <https://pypsa.readthedocs.io>`_.
|
||||
with its general modelling framework `PyPSA <https://pypsa.readthedocs.io>`__.
|
||||
|
||||
Running the tutorial requires limited computational resources compared to the full model,
|
||||
which allows the user to explore most of its functionalities on a local machine.
|
||||
@ -35,15 +35,15 @@ To run the tutorial, use this as your configuration file ``config.yaml``.
|
||||
|
||||
.../pypsa-eur % cp config.tutorial.yaml config.yaml
|
||||
|
||||
This configuration is set to download a reduced data set via the rules :mod:`retrieve_databundle`,
|
||||
:mod:`retrieve_natura_raster`, :mod:`retrieve_cutout` totalling at less than 250 MB.
|
||||
This configuration is set to download a reduced data set via the rules :mod:`retrieve_databundle`
|
||||
and :mod:`retrieve_cutout` totalling at less than 250 MB.
|
||||
The full set of data dependencies would consume 5.3 GB.
|
||||
For more information on the data dependencies of PyPSA-Eur, continue reading :ref:`data`.
|
||||
|
||||
How to customise PyPSA-Eur?
|
||||
===========================
|
||||
|
||||
The model can be adapted to only include selected countries (e.g. Germany) instead of all European countries to limit the spatial scope.
|
||||
The model can be adapted to only include selected countries (e.g. Belgium) instead of all European countries to limit the spatial scope.
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
@ -53,41 +53,43 @@ Likewise, the example's temporal scope can be restricted (e.g. to a single month
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 22-25
|
||||
:start-at: snapshots:
|
||||
:end-before: enable:
|
||||
|
||||
It is also possible to allow less or more carbon-dioxide emissions. Here, we limit the emissions of Germany to 100 Megatonnes per year.
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 36,38
|
||||
:lines: 40,42
|
||||
|
||||
PyPSA-Eur also includes a database of existing conventional powerplants.
|
||||
We can select which types of powerplants we like to be included with fixed capacities:
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 36,52
|
||||
:lines: 40,56
|
||||
|
||||
To accurately model the temporal and spatial availability of renewables such as wind and solar energy, we rely on historical weather data.
|
||||
It is advisable to adapt the required range of coordinates to the selection of countries.
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 54-62
|
||||
:start-at: atlite:
|
||||
:end-before: renewable:
|
||||
|
||||
We can also decide which weather data source should be used to calculate potentials and capacity factor time-series for each carrier.
|
||||
For example, we may want to use the ERA-5 dataset for solar and not the default SARAH-2 dataset.
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 64,107-108
|
||||
:lines: 67,110,111
|
||||
|
||||
Finally, it is possible to pick a solver. For instance, this tutorial uses the open-source solvers CBC and Ipopt and does not rely
|
||||
on the commercial solvers Gurobi or CPLEX (for which free academic licenses are available).
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 170,180-181
|
||||
:lines: 173,183,184
|
||||
|
||||
.. note::
|
||||
|
||||
@ -126,11 +128,6 @@ orders ``snakemake`` to run the script ``solve_network`` that produces the solve
|
||||
|
||||
.. until https://github.com/snakemake/snakemake/issues/46 closed
|
||||
|
||||
.. warning::
|
||||
On Windows the previous command may currently cause a ``MissingRuleException`` due to problems with output files in subfolders.
|
||||
This is an `open issue <https://github.com/snakemake/snakemake/issues/46>`_ at `snakemake <https://snakemake.readthedocs.io/>`_.
|
||||
Windows users should add the option ``--keep-target-files`` to the command or instead run ``snakemake -j 1 solve_all_networks``.
|
||||
|
||||
This triggers a workflow of multiple preceding jobs that depend on each rule's inputs and outputs:
|
||||
|
||||
.. graphviz::
|
||||
@ -271,7 +268,8 @@ the wildcards given in ``scenario`` in the configuration file ``config.yaml`` ar
|
||||
|
||||
.. literalinclude:: ../config.tutorial.yaml
|
||||
:language: yaml
|
||||
:lines: 14-18
|
||||
:start-at: scenario:
|
||||
:end-before: countries:
|
||||
|
||||
In this example we would not only solve a 6-node model of Germany but also a 2-node model.
|
||||
|
||||
|
@ -126,7 +126,7 @@ The ``{technology}`` wildcard
|
||||
The ``{technology}`` wildcard specifies for which renewable energy technology to produce availability time
|
||||
series and potentials using the rule :mod:`build_renewable_profiles`.
|
||||
It can take the values ``onwind``, ``offwind-ac``, ``offwind-dc``, and ``solar`` but **not** ``hydro``
|
||||
(since hydroelectric plant profiles are created by a different rule.
|
||||
(since hydroelectric plant profiles are created by a different rule).
|
||||
|
||||
The wildcard can moreover be used to create technology specific figures and summaries.
|
||||
For instance ``{technology}`` can be used to plot regionally disaggregated potentials
|
||||
|
@ -1,32 +0,0 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
name: pypsa-eur-docs
|
||||
channels:
|
||||
- conda-forge
|
||||
dependencies:
|
||||
- python<=3.7
|
||||
- pip
|
||||
- pypsa>=0.17.1
|
||||
- atlite>=0.2.2
|
||||
- dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved
|
||||
- pre-commit
|
||||
|
||||
# Dependencies of the workflow itself
|
||||
- scikit-learn
|
||||
- pycountry
|
||||
- seaborn
|
||||
- memory_profiler
|
||||
- yaml
|
||||
- pytables
|
||||
- powerplantmatching>=0.4.8
|
||||
|
||||
# GIS dependencies have to come all from conda-forge
|
||||
- cartopy
|
||||
- descartes
|
||||
|
||||
- pip:
|
||||
- vresutils==0.3.1
|
||||
- sphinx
|
||||
- sphinx_rtd_theme
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
name: pypsa-eur
|
||||
channels:
|
||||
@ -11,255 +11,301 @@ dependencies:
|
||||
- _libgcc_mutex=0.1
|
||||
- _openmp_mutex=4.5
|
||||
- affine=2.3.0
|
||||
- alsa-lib=1.2.3
|
||||
- amply=0.1.4
|
||||
- appdirs=1.4.4
|
||||
- atlite=0.0.3
|
||||
- attrs=20.3.0
|
||||
- atlite=0.2.5
|
||||
- attrs=21.2.0
|
||||
- backcall=0.2.0
|
||||
- backports=1.0
|
||||
- backports.functools_lru_cache=1.6.1
|
||||
- beautifulsoup4=4.9.3
|
||||
- blosc=1.20.1
|
||||
- bokeh=2.2.3
|
||||
- boost-cpp=1.72.0
|
||||
- backports.functools_lru_cache=1.6.4
|
||||
- beautifulsoup4=4.10.0
|
||||
- blosc=1.21.0
|
||||
- bokeh=2.3.3
|
||||
- boost-cpp=1.74.0
|
||||
- bottleneck=1.3.2
|
||||
- brotlipy=0.7.0
|
||||
- bzip2=1.0.8
|
||||
- c-ares=1.17.1
|
||||
- ca-certificates=2020.11.8
|
||||
- c-ares=1.17.2
|
||||
- ca-certificates=2021.5.30
|
||||
- cairo=1.16.0
|
||||
- cartopy=0.17.0
|
||||
- certifi=2020.11.8
|
||||
- cffi=1.14.4
|
||||
- cartopy=0.19.0.post1
|
||||
- cdsapi=0.5.1
|
||||
- certifi=2021.5.30
|
||||
- cffi=1.14.6
|
||||
- cfitsio=3.470
|
||||
- cftime=1.3.0
|
||||
- chardet=3.0.4
|
||||
- cftime=1.5.0
|
||||
- chardet=4.0.0
|
||||
- charset-normalizer=2.0.0
|
||||
- click=7.1.2
|
||||
- click-plugins=1.1.1
|
||||
- cligj=0.7.1
|
||||
- cloudpickle=1.6.0
|
||||
- cligj=0.7.2
|
||||
- cloudpickle=2.0.0
|
||||
- coincbc=2.10.5
|
||||
- conda=4.9.2
|
||||
- conda-package-handling=1.7.2
|
||||
- configargparse=1.2.3
|
||||
- cryptography=3.2.1
|
||||
- curl=7.71.1
|
||||
- colorama=0.4.4
|
||||
- conda=4.10.3
|
||||
- conda-package-handling=1.7.3
|
||||
- configargparse=1.5.2
|
||||
- connection_pool=0.0.3
|
||||
- country_converter=0.7.3
|
||||
- cryptography=3.4.7
|
||||
- curl=7.79.0
|
||||
- cycler=0.10.0
|
||||
- cytoolz=0.11.0
|
||||
- dask=2.30.0
|
||||
- dask-core=2.30.0
|
||||
- dask=2021.3.1
|
||||
- dask-core=2021.3.1
|
||||
- datrie=0.8.2
|
||||
- dbus=1.13.6
|
||||
- decorator=4.4.2
|
||||
- deprecation=2.1.0
|
||||
- descartes=1.1.0
|
||||
- distributed=2.30.1
|
||||
- docutils=0.16
|
||||
- entsoe-py=0.2.10
|
||||
- expat=2.2.9
|
||||
- fiona=1.8.13
|
||||
- distributed=2021.4.1
|
||||
- distro=1.5.0
|
||||
- docutils=0.17.1
|
||||
- entsoe-py=0.3.7
|
||||
- et_xmlfile=1.0.1
|
||||
- expat=2.4.1
|
||||
- filelock=3.0.12
|
||||
- fiona=1.8.18
|
||||
- fontconfig=2.13.1
|
||||
- freetype=2.10.4
|
||||
- freexl=1.0.5
|
||||
- fsspec=0.8.4
|
||||
- gdal=3.0.4
|
||||
- geographiclib=1.50
|
||||
- geopandas=0.8.1
|
||||
- geopy=2.0.0
|
||||
- geos=3.8.1
|
||||
- freexl=1.0.6
|
||||
- fsspec=2021.8.1
|
||||
- gdal=3.2.1
|
||||
- geographiclib=1.52
|
||||
- geopandas=0.9.0
|
||||
- geopandas-base=0.9.0
|
||||
- geopy=2.2.0
|
||||
- geos=3.9.1
|
||||
- geotiff=1.6.0
|
||||
- gettext=0.19.8.1
|
||||
- giflib=5.2.1
|
||||
- gitdb=4.0.5
|
||||
- gitpython=3.1.11
|
||||
- glib=2.66.3
|
||||
- glpk=4.65
|
||||
- gmp=6.2.1
|
||||
- hdf4=4.2.13
|
||||
- gitdb=4.0.7
|
||||
- gitpython=3.1.23
|
||||
- glib=2.68.4
|
||||
- glib-tools=2.68.4
|
||||
- graphite2=1.3.13
|
||||
- gst-plugins-base=1.18.5
|
||||
- gstreamer=1.18.5
|
||||
- harfbuzz=2.9.1
|
||||
- hdf4=4.2.15
|
||||
- hdf5=1.10.6
|
||||
- heapdict=1.0.1
|
||||
- icu=64.2
|
||||
- idna=2.10
|
||||
- importlib-metadata=3.1.1
|
||||
- importlib_metadata=3.1.1
|
||||
- ipopt=3.13.2
|
||||
- ipython=7.19.0
|
||||
- icu=68.1
|
||||
- idna=3.1
|
||||
- importlib-metadata=4.8.1
|
||||
- iniconfig=1.1.1
|
||||
- ipython=7.27.0
|
||||
- ipython_genutils=0.2.0
|
||||
- jedi=0.17.2
|
||||
- jinja2=2.11.2
|
||||
- joblib=0.17.0
|
||||
- jdcal=1.4.1
|
||||
- jedi=0.18.0
|
||||
- jinja2=3.0.1
|
||||
- joblib=1.0.1
|
||||
- jpeg=9d
|
||||
- json-c=0.13.1
|
||||
- json-c=0.15
|
||||
- jsonschema=3.2.0
|
||||
- jupyter_core=4.7.0
|
||||
- jupyter_core=4.8.1
|
||||
- kealib=1.4.14
|
||||
- kiwisolver=1.3.1
|
||||
- krb5=1.17.2
|
||||
- lcms2=2.11
|
||||
- ld_impl_linux-64=2.35.1
|
||||
- libarchive=3.3.3
|
||||
- kiwisolver=1.3.2
|
||||
- krb5=1.19.2
|
||||
- lcms2=2.12
|
||||
- ld_impl_linux-64=2.36.1
|
||||
- libarchive=3.5.1
|
||||
- libblas=3.9.0
|
||||
- libcblas=3.9.0
|
||||
- libcurl=7.71.1
|
||||
- libclang=11.1.0
|
||||
- libcurl=7.79.0
|
||||
- libdap4=3.20.6
|
||||
- libedit=3.1.20191231
|
||||
- libev=4.33
|
||||
- libffi=3.3
|
||||
- libgcc-ng=9.3.0
|
||||
- libgdal=3.0.4
|
||||
- libgfortran-ng=7.5.0
|
||||
- libgfortran4=7.5.0
|
||||
- libgfortran5=9.3.0
|
||||
- libglib=2.66.3
|
||||
- libgomp=9.3.0
|
||||
- libevent=2.1.10
|
||||
- libffi=3.4.2
|
||||
- libgcc-ng=11.2.0
|
||||
- libgdal=3.2.1
|
||||
- libgfortran-ng=11.2.0
|
||||
- libgfortran5=11.2.0
|
||||
- libglib=2.68.4
|
||||
- libgomp=11.2.0
|
||||
- libiconv=1.16
|
||||
- libkml=1.3.0
|
||||
- liblapack=3.9.0
|
||||
- libllvm11=11.1.0
|
||||
- libnetcdf=4.7.4
|
||||
- libnghttp2=1.41.0
|
||||
- libopenblas=0.3.12
|
||||
- libnghttp2=1.43.0
|
||||
- libogg=1.3.4
|
||||
- libopenblas=0.3.17
|
||||
- libopus=1.3.1
|
||||
- libpng=1.6.37
|
||||
- libpq=12.3
|
||||
- libsolv=0.7.16
|
||||
- libpq=13.3
|
||||
- librttopo=1.1.0
|
||||
- libsolv=0.7.19
|
||||
- libspatialindex=1.9.3
|
||||
- libspatialite=4.3.0a
|
||||
- libssh2=1.9.0
|
||||
- libstdcxx-ng=9.3.0
|
||||
- libtiff=4.1.0
|
||||
- libspatialite=5.0.1
|
||||
- libssh2=1.10.0
|
||||
- libstdcxx-ng=11.2.0
|
||||
- libtiff=4.2.0
|
||||
- libuuid=2.32.1
|
||||
- libwebp-base=1.1.0
|
||||
- libvorbis=1.3.7
|
||||
- libwebp-base=1.2.1
|
||||
- libxcb=1.13
|
||||
- libxml2=2.9.10
|
||||
- libxkbcommon=1.0.3
|
||||
- libxml2=2.9.12
|
||||
- libxslt=1.1.33
|
||||
- locket=0.2.0
|
||||
- lxml=4.6.2
|
||||
- lz4-c=1.9.2
|
||||
- lxml=4.6.3
|
||||
- lz4-c=1.9.3
|
||||
- lzo=2.10
|
||||
- mamba=0.7.3
|
||||
- markupsafe=1.1.1
|
||||
- matplotlib-base=3.3.3
|
||||
- mamba=0.15.3
|
||||
- mapclassify=2.4.3
|
||||
- markupsafe=2.0.1
|
||||
- matplotlib=3.4.3
|
||||
- matplotlib-base=3.4.3
|
||||
- matplotlib-inline=0.1.3
|
||||
- memory_profiler=0.58.0
|
||||
- metis=5.1.0
|
||||
- mock=4.0.2
|
||||
- msgpack-python=1.0.0
|
||||
- mock=4.0.3
|
||||
- more-itertools=8.10.0
|
||||
- msgpack-python=1.0.2
|
||||
- munch=2.5.0
|
||||
- nbformat=5.0.8
|
||||
- mysql-common=8.0.25
|
||||
- mysql-libs=8.0.25
|
||||
- nbformat=5.1.3
|
||||
- ncurses=6.2
|
||||
- netcdf4=1.5.4
|
||||
- networkx=2.5
|
||||
- nose=1.3.7
|
||||
- numexpr=2.7.1
|
||||
- numpy=1.19.0
|
||||
- netcdf4=1.5.6
|
||||
- networkx=2.6.3
|
||||
- nspr=4.30
|
||||
- nss=3.69
|
||||
- numexpr=2.7.3
|
||||
- numpy=1.21.2
|
||||
- olefile=0.46
|
||||
- openjpeg=2.3.1
|
||||
- openssl=1.1.1h
|
||||
- owslib=0.20.0
|
||||
- packaging=20.7
|
||||
- pandas=1.1.4
|
||||
- parso=0.7.1
|
||||
- partd=1.1.0
|
||||
- openjdk=11.0.9.1
|
||||
- openjpeg=2.4.0
|
||||
- openpyxl=3.0.8
|
||||
- openssl=1.1.1l
|
||||
- packaging=21.0
|
||||
- pandas=1.2.5
|
||||
- parso=0.8.2
|
||||
- partd=1.2.0
|
||||
- patsy=0.5.1
|
||||
- pcre=8.44
|
||||
- pcre=8.45
|
||||
- pexpect=4.8.0
|
||||
- pickleshare=0.7.5
|
||||
- pillow=8.0.1
|
||||
- pip=20.3.1
|
||||
- pixman=0.38.0
|
||||
- pillow=8.2.0
|
||||
- pip=21.2.4
|
||||
- pixman=0.40.0
|
||||
- pluggy=1.0.0
|
||||
- ply=3.11
|
||||
- poppler=0.87.0
|
||||
- poppler-data=0.4.10
|
||||
- postgresql=12.3
|
||||
- poppler=0.89.0
|
||||
- poppler-data=0.4.11
|
||||
- postgresql=13.3
|
||||
- powerplantmatching=0.4.8
|
||||
- progressbar2=3.53.1
|
||||
- proj=7.0.0
|
||||
- prompt-toolkit=3.0.8
|
||||
- psutil=5.7.3
|
||||
- proj=7.2.0
|
||||
- prompt-toolkit=3.0.20
|
||||
- psutil=5.8.0
|
||||
- pthread-stubs=0.4
|
||||
- ptyprocess=0.6.0
|
||||
- pulp=2.3.1
|
||||
- ptyprocess=0.7.0
|
||||
- pulp=2.5.0
|
||||
- py=1.10.0
|
||||
- pycosat=0.6.3
|
||||
- pycountry=20.7.3
|
||||
- pycparser=2.20
|
||||
- pyepsg=0.4.0
|
||||
- pygments=2.7.2
|
||||
- pykdtree=1.3.4
|
||||
- pyomo=5.7.1
|
||||
- pyopenssl=20.0.0
|
||||
- pygments=2.10.0
|
||||
- pyomo=6.1.2
|
||||
- pyopenssl=20.0.1
|
||||
- pyparsing=2.4.7
|
||||
- pyproj=2.6.1.post1
|
||||
- pypsa=0.17.1
|
||||
- pyproj=3.1.0
|
||||
- pypsa=0.18.0
|
||||
- pyqt=5.12.3
|
||||
- pyqt-impl=5.12.3
|
||||
- pyqt5-sip=4.19.18
|
||||
- pyqtchart=5.12
|
||||
- pyqtwebengine=5.12.1
|
||||
- pyrsistent=0.17.3
|
||||
- pyshp=2.1.2
|
||||
- pyshp=2.1.3
|
||||
- pysocks=1.7.1
|
||||
- pytables=3.6.1
|
||||
- python=3.8.6
|
||||
- python-dateutil=2.8.1
|
||||
- python-utils=2.4.0
|
||||
- python_abi=3.8
|
||||
- pytz=2020.4
|
||||
- pyutilib=6.0.0
|
||||
- pyyaml=5.3.1
|
||||
- rasterio=1.1.5
|
||||
- pytest=6.2.5
|
||||
- python=3.9.7
|
||||
- python-dateutil=2.8.2
|
||||
- python-utils=2.5.6
|
||||
- python_abi=3.9
|
||||
- pytz=2021.1
|
||||
- pyyaml=5.4.1
|
||||
- qt=5.12.9
|
||||
- rasterio=1.2.6
|
||||
- ratelimiter=1.2.0
|
||||
- readline=8.0
|
||||
- reproc=14.2.1
|
||||
- reproc-cpp=14.2.1
|
||||
- requests=2.25.0
|
||||
- rtree=0.9.4
|
||||
- readline=8.1
|
||||
- reproc=14.2.3
|
||||
- reproc-cpp=14.2.3
|
||||
- requests=2.26.0
|
||||
- rtree=0.9.7
|
||||
- ruamel_yaml=0.15.80
|
||||
- scikit-learn=0.23.2
|
||||
- scipy=1.5.3
|
||||
- seaborn=0.11.0
|
||||
- seaborn-base=0.11.0
|
||||
- setuptools=49.6.0
|
||||
- scikit-learn=0.24.2
|
||||
- scipy=1.7.1
|
||||
- seaborn=0.11.2
|
||||
- seaborn-base=0.11.2
|
||||
- setuptools=58.0.4
|
||||
- setuptools-scm=6.3.2
|
||||
- setuptools_scm=6.3.2
|
||||
- shapely=1.7.1
|
||||
- six=1.15.0
|
||||
- smmap=3.0.4
|
||||
- snakemake-minimal=5.30.1
|
||||
- six=1.16.0
|
||||
- smart_open=5.2.1
|
||||
- smmap=3.0.5
|
||||
- snakemake-minimal=6.8.0
|
||||
- snuggs=1.4.7
|
||||
- sortedcontainers=2.3.0
|
||||
- sortedcontainers=2.4.0
|
||||
- soupsieve=2.0.1
|
||||
- sqlite=3.34.0
|
||||
- statsmodels=0.12.1
|
||||
- tbb=2020.2
|
||||
- tblib=1.6.0
|
||||
- threadpoolctl=2.1.0
|
||||
- tiledb=1.7.7
|
||||
- tk=8.6.10
|
||||
- sqlite=3.36.0
|
||||
- statsmodels=0.12.2
|
||||
- stopit=1.1.2
|
||||
- tabula-py=2.2.0
|
||||
- tabulate=0.8.9
|
||||
- tblib=1.7.0
|
||||
- threadpoolctl=2.2.0
|
||||
- tiledb=2.2.9
|
||||
- tk=8.6.11
|
||||
- toml=0.10.2
|
||||
- tomli=1.2.1
|
||||
- toolz=0.11.1
|
||||
- toposort=1.5
|
||||
- toposort=1.6
|
||||
- tornado=6.1
|
||||
- tqdm=4.54.1
|
||||
- traitlets=5.0.5
|
||||
- typing_extensions=3.7.4.3
|
||||
- tzcode=2020a
|
||||
- urllib3=1.25.11
|
||||
- tqdm=4.62.3
|
||||
- traitlets=5.1.0
|
||||
- typing_extensions=3.10.0.2
|
||||
- tzcode=2021a
|
||||
- tzdata=2021a
|
||||
- urllib3=1.26.6
|
||||
- wcwidth=0.2.5
|
||||
- wheel=0.36.1
|
||||
- wheel=0.37.0
|
||||
- wrapt=1.12.1
|
||||
- xarray=0.16.2
|
||||
- xerces-c=3.2.2
|
||||
- xlrd=1.2.0
|
||||
- xarray=0.19.0
|
||||
- xerces-c=3.2.3
|
||||
- xlrd=2.0.1
|
||||
- xorg-fixesproto=5.0
|
||||
- xorg-inputproto=2.3.2
|
||||
- xorg-kbproto=1.0.7
|
||||
- xorg-libice=1.0.10
|
||||
- xorg-libsm=1.2.3
|
||||
- xorg-libx11=1.6.12
|
||||
- xorg-libx11=1.7.2
|
||||
- xorg-libxau=1.0.9
|
||||
- xorg-libxdmcp=1.1.3
|
||||
- xorg-libxext=1.3.4
|
||||
- xorg-libxfixes=5.0.3
|
||||
- xorg-libxi=1.7.10
|
||||
- xorg-libxrender=0.9.10
|
||||
- xorg-libxtst=1.2.3
|
||||
- xorg-recordproto=1.14.2
|
||||
- xorg-renderproto=0.11.1
|
||||
- xorg-xextproto=7.3.0
|
||||
- xorg-xproto=7.0.31
|
||||
- xz=5.2.5
|
||||
- yaml=0.2.5
|
||||
- zict=2.0.0
|
||||
- zipp=3.4.0
|
||||
- zipp=3.5.0
|
||||
- zlib=1.2.11
|
||||
- zstd=1.4.5
|
||||
- zstd=1.4.9
|
||||
- pip:
|
||||
- cdsapi==0.4.0
|
||||
- countrycode==0.2
|
||||
- geokit==1.1.2
|
||||
- glaes==1.1.2
|
||||
- sklearn==0.0
|
||||
- tsam==1.1.0
|
||||
- tsam==1.1.1
|
||||
- vresutils==0.3.1
|
||||
|
@ -1,25 +1,22 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
name: pypsa-eur
|
||||
channels:
|
||||
- conda-forge
|
||||
- bioconda
|
||||
- http://conda.anaconda.org/gurobi
|
||||
dependencies:
|
||||
- python>=3.8
|
||||
- pip
|
||||
- mamba # esp for windows build
|
||||
|
||||
- pypsa>=0.17.1
|
||||
- atlite>=0.2.2
|
||||
- dask<=2021.3.1 # until https://github.com/dask/dask/issues/7583 is solved
|
||||
- pypsa>=0.18.1
|
||||
- atlite>=0.2.6
|
||||
- dask
|
||||
|
||||
# Dependencies of the workflow itself
|
||||
- xlrd
|
||||
- openpyxl
|
||||
- scikit-learn
|
||||
- pycountry
|
||||
- seaborn
|
||||
- snakemake-minimal
|
||||
@ -27,9 +24,20 @@ dependencies:
|
||||
- yaml
|
||||
- pytables
|
||||
- lxml
|
||||
- powerplantmatching>=0.4.8
|
||||
- numpy<=1.19.0 # otherwise macos fails
|
||||
|
||||
- powerplantmatching>=0.5.3
|
||||
- numpy
|
||||
- pandas
|
||||
- geopandas
|
||||
- xarray
|
||||
- netcdf4
|
||||
- networkx
|
||||
- scipy
|
||||
- shapely
|
||||
- progressbar2
|
||||
- pyomo
|
||||
- matplotlib
|
||||
- proj
|
||||
- fiona<=1.18.20 # Till issue https://github.com/Toblerity/Fiona/issues/1085 is not solved
|
||||
|
||||
# Keep in conda environment when calling ipython
|
||||
- ipython
|
||||
@ -37,7 +45,16 @@ dependencies:
|
||||
# GIS dependencies:
|
||||
- cartopy
|
||||
- descartes
|
||||
- fiona # explicit for Windows
|
||||
- rasterio<=1.2.9 # 1.2.10 creates error https://github.com/PyPSA/atlite/issues/238
|
||||
|
||||
# PyPSA-Eur-Sec Dependencies
|
||||
- geopy
|
||||
- tqdm
|
||||
- pytz
|
||||
- country_converter
|
||||
- tabula-py
|
||||
|
||||
- pip:
|
||||
- vresutils==0.3.1
|
||||
- vresutils>=0.3.1
|
||||
- tsam>=1.1.0
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
import pandas as pd
|
||||
from pathlib import Path
|
||||
@ -119,12 +119,20 @@ def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
|
||||
# bus_carrier = n.storage_units.bus.map(n.buses.carrier)
|
||||
# n.storage_units.loc[bus_carrier == "heat","carrier"] = "water tanks"
|
||||
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
costs = load_costs(Nyears, tech_costs, config['costs'], config['electricity'])
|
||||
Nyears = n.snapshot_weightings.objective.sum() / 8760.
|
||||
costs = load_costs(tech_costs, config['costs'], config['electricity'], Nyears)
|
||||
update_transmission_costs(n, costs)
|
||||
|
||||
return n
|
||||
|
||||
def update_p_nom_max(n):
|
||||
# if extendable carriers (solar/onwind/...) have capacity >= 0,
|
||||
# e.g. existing assets from the OPSD project are included to the network,
|
||||
# the installed capacity might exceed the expansion limit.
|
||||
# Hence, we update the assumptions.
|
||||
|
||||
n.generators.p_nom_max = n.generators[['p_nom_min', 'p_nom_max']].max(1)
|
||||
|
||||
def aggregate_p_nom(n):
|
||||
return pd.concat([
|
||||
n.generators.groupby("carrier").p_nom_opt.sum(),
|
||||
@ -156,7 +164,6 @@ def aggregate_p_curtailed(n):
|
||||
])
|
||||
|
||||
def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
|
||||
from six import iterkeys, itervalues
|
||||
|
||||
components = dict(Link=("p_nom", "p0"),
|
||||
Generator=("p_nom", "p"),
|
||||
@ -167,8 +174,8 @@ def aggregate_costs(n, flatten=False, opts=None, existing_only=False):
|
||||
|
||||
costs = {}
|
||||
for c, (p_nom, p_attr) in zip(
|
||||
n.iterate_components(iterkeys(components), skip_empty=False),
|
||||
itervalues(components)
|
||||
n.iterate_components(components.keys(), skip_empty=False),
|
||||
components.values()
|
||||
):
|
||||
if c.df.empty: continue
|
||||
if not existing_only: p_nom += "_opt"
|
||||
@ -224,6 +231,7 @@ def mock_snakemake(rulename, **wildcards):
|
||||
import os
|
||||
from pypsa.descriptors import Dict
|
||||
from snakemake.script import Snakemake
|
||||
from packaging.version import Version, parse
|
||||
|
||||
script_dir = Path(__file__).parent.resolve()
|
||||
assert Path.cwd().resolve() == script_dir, \
|
||||
@ -233,7 +241,8 @@ def mock_snakemake(rulename, **wildcards):
|
||||
if os.path.exists(p):
|
||||
snakefile = p
|
||||
break
|
||||
workflow = sm.Workflow(snakefile)
|
||||
kwargs = dict(rerun_triggers=[]) if parse(sm.__version__) > Version("7.7.0") else {}
|
||||
workflow = sm.Workflow(snakefile, overwrite_configfiles=[], **kwargs)
|
||||
workflow.include(snakefile)
|
||||
workflow.global_resources = {}
|
||||
rule = workflow.get_rule(rulename)
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
@ -84,7 +84,7 @@ It further adds extendable ``generators`` with **zero** capacity for
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging
|
||||
from _helpers import configure_logging, update_p_nom_max
|
||||
|
||||
import pypsa
|
||||
import pandas as pd
|
||||
@ -93,8 +93,6 @@ import xarray as xr
|
||||
import geopandas as gpd
|
||||
import powerplantmatching as pm
|
||||
from powerplantmatching.export import map_country_bus
|
||||
|
||||
from vresutils.load import timeseries_opsd
|
||||
from vresutils import transfer as vtransfer
|
||||
|
||||
idx = pd.IndexSlice
|
||||
@ -105,6 +103,18 @@ logger = logging.getLogger(__name__)
|
||||
def normed(s): return s/s.sum()
|
||||
|
||||
|
||||
def calculate_annuity(n, r):
|
||||
"""Calculate the annuity factor for an asset with lifetime n years and
|
||||
discount rate of r, e.g. annuity(20, 0.05) * 20 = 1.6"""
|
||||
|
||||
if isinstance(r, pd.Series):
|
||||
return pd.Series(1/n, index=r.index).where(r == 0, r/(1. - 1./(1.+r)**n))
|
||||
elif r > 0:
|
||||
return r / (1. - 1./(1.+r)**n)
|
||||
else:
|
||||
return 1 / n
|
||||
|
||||
|
||||
def _add_missing_carriers_from_costs(n, costs, carriers):
|
||||
missing_carriers = pd.Index(carriers).difference(n.carriers.index)
|
||||
if missing_carriers.empty: return
|
||||
@ -117,24 +127,7 @@ def _add_missing_carriers_from_costs(n, costs, carriers):
|
||||
n.import_components_from_dataframe(emissions, 'Carrier')
|
||||
|
||||
|
||||
def annuity(n, r):
|
||||
"""Calculate the annuity factor for an asset with lifetime n years and
|
||||
discount rate of r, e.g. annuity(20,0.05)*20 = 1.6"""
|
||||
|
||||
if isinstance(r, pd.Series):
|
||||
return pd.Series(1/n, index=r.index).where(r == 0, r/(1. - 1./(1.+r)**n))
|
||||
elif r > 0:
|
||||
return r/(1. - 1./(1.+r)**n)
|
||||
else:
|
||||
return 1/n
|
||||
|
||||
|
||||
def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
|
||||
if tech_costs is None:
|
||||
tech_costs = snakemake.input.tech_costs
|
||||
|
||||
if config is None:
|
||||
config = snakemake.config['costs']
|
||||
def load_costs(tech_costs, config, elec_config, Nyears=1.):
|
||||
|
||||
# set all asset costs and other parameters
|
||||
costs = pd.read_csv(tech_costs, index_col=[0,1]).sort_index()
|
||||
@ -146,7 +139,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
|
||||
fill_values = config["fill_values"]
|
||||
costs = costs.value.unstack().fillna(fill_values)
|
||||
|
||||
costs["capital_cost"] = ((annuity(costs["lifetime"], costs["discount rate"]) +
|
||||
costs["capital_cost"] = ((calculate_annuity(costs["lifetime"], costs["discount rate"]) +
|
||||
costs["FOM"]/100.) *
|
||||
costs["investment"] * Nyears)
|
||||
|
||||
@ -171,8 +164,6 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
|
||||
marginal_cost=0.,
|
||||
co2_emissions=0.))
|
||||
|
||||
if elec_config is None:
|
||||
elec_config = snakemake.config['electricity']
|
||||
max_hours = elec_config['max_hours']
|
||||
costs.loc["battery"] = \
|
||||
costs_for_storage(costs.loc["battery storage"], costs.loc["battery inverter"],
|
||||
@ -190,9 +181,7 @@ def load_costs(Nyears=1., tech_costs=None, config=None, elec_config=None):
|
||||
return costs
|
||||
|
||||
|
||||
def load_powerplants(ppl_fn=None):
|
||||
if ppl_fn is None:
|
||||
ppl_fn = snakemake.input.powerplants
|
||||
def load_powerplants(ppl_fn):
|
||||
carrier_dict = {'ocgt': 'OCGT', 'ccgt': 'CCGT', 'bioenergy': 'biomass',
|
||||
'ccgt, thermal': 'CCGT', 'hard coal': 'coal'}
|
||||
return (pd.read_csv(ppl_fn, index_col=0, dtype={'bus': 'str'})
|
||||
@ -201,18 +190,18 @@ def load_powerplants(ppl_fn=None):
|
||||
.replace({'carrier': carrier_dict}))
|
||||
|
||||
|
||||
def attach_load(n):
|
||||
substation_lv_i = n.buses.index[n.buses['substation_lv']]
|
||||
regions = (gpd.read_file(snakemake.input.regions).set_index('name')
|
||||
.reindex(substation_lv_i))
|
||||
opsd_load = (pd.read_csv(snakemake.input.load, index_col=0, parse_dates=True)
|
||||
.filter(items=snakemake.config['countries']))
|
||||
def attach_load(n, regions, load, nuts3_shapes, countries, scaling=1.):
|
||||
|
||||
substation_lv_i = n.buses.index[n.buses['substation_lv']]
|
||||
regions = (gpd.read_file(regions).set_index('name')
|
||||
.reindex(substation_lv_i))
|
||||
opsd_load = (pd.read_csv(load, index_col=0, parse_dates=True)
|
||||
.filter(items=countries))
|
||||
|
||||
scaling = snakemake.config.get('load', {}).get('scaling_factor', 1.0)
|
||||
logger.info(f"Load data scaled with scalling factor {scaling}.")
|
||||
opsd_load *= scaling
|
||||
|
||||
nuts3 = gpd.read_file(snakemake.input.nuts3_shapes).set_index('index')
|
||||
nuts3 = gpd.read_file(nuts3_shapes).set_index('index')
|
||||
|
||||
def upsample(cntry, group):
|
||||
l = opsd_load[cntry]
|
||||
@ -229,7 +218,6 @@ def attach_load(n):
|
||||
|
||||
# relative factors 0.6 and 0.4 have been determined from a linear
|
||||
# regression on the country to continent load data
|
||||
# (refer to vresutils.load._upsampling_weights)
|
||||
factors = normed(0.6 * normed(gdp_n) + 0.4 * normed(pop_n))
|
||||
return pd.DataFrame(factors.values * l.values[:,np.newaxis],
|
||||
index=l.index, columns=factors.index)
|
||||
@ -240,7 +228,10 @@ def attach_load(n):
|
||||
n.madd("Load", substation_lv_i, bus=substation_lv_i, p_set=load)
|
||||
|
||||
|
||||
def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=False):
|
||||
def update_transmission_costs(n, costs, length_factor=1.0):
|
||||
# TODO: line length factor of lines is applied to lines and links.
|
||||
# Separate the function to distinguish.
|
||||
|
||||
n.lines['capital_cost'] = (n.lines['length'] * length_factor *
|
||||
costs.at['HVAC overhead', 'capital_cost'])
|
||||
|
||||
@ -252,31 +243,29 @@ def update_transmission_costs(n, costs, length_factor=1.0, simple_hvdc_costs=Fal
|
||||
# may be missing. Therefore we have to return here.
|
||||
if n.links.loc[dc_b].empty: return
|
||||
|
||||
if simple_hvdc_costs:
|
||||
costs = (n.links.loc[dc_b, 'length'] * length_factor *
|
||||
costs.at['HVDC overhead', 'capital_cost'])
|
||||
else:
|
||||
costs = (n.links.loc[dc_b, 'length'] * length_factor *
|
||||
((1. - n.links.loc[dc_b, 'underwater_fraction']) *
|
||||
costs.at['HVDC overhead', 'capital_cost'] +
|
||||
n.links.loc[dc_b, 'underwater_fraction'] *
|
||||
costs.at['HVDC submarine', 'capital_cost']) +
|
||||
costs.at['HVDC inverter pair', 'capital_cost'])
|
||||
costs = (n.links.loc[dc_b, 'length'] * length_factor *
|
||||
((1. - n.links.loc[dc_b, 'underwater_fraction']) *
|
||||
costs.at['HVDC overhead', 'capital_cost'] +
|
||||
n.links.loc[dc_b, 'underwater_fraction'] *
|
||||
costs.at['HVDC submarine', 'capital_cost']) +
|
||||
costs.at['HVDC inverter pair', 'capital_cost'])
|
||||
n.links.loc[dc_b, 'capital_cost'] = costs
|
||||
|
||||
|
||||
def attach_wind_and_solar(n, costs):
|
||||
for tech in snakemake.config['renewable']:
|
||||
def attach_wind_and_solar(n, costs, input_profiles, technologies, line_length_factor=1):
|
||||
# TODO: rename tech -> carrier, technologies -> carriers
|
||||
|
||||
for tech in technologies:
|
||||
if tech == 'hydro': continue
|
||||
|
||||
n.add("Carrier", name=tech)
|
||||
with xr.open_dataset(getattr(snakemake.input, 'profile_' + tech)) as ds:
|
||||
with xr.open_dataset(getattr(input_profiles, 'profile_' + tech)) as ds:
|
||||
if ds.indexes['bus'].empty: continue
|
||||
|
||||
suptech = tech.split('-', 2)[0]
|
||||
if suptech == 'offwind':
|
||||
underwater_fraction = ds['underwater_fraction'].to_pandas()
|
||||
connection_cost = (snakemake.config['lines']['length_factor'] *
|
||||
connection_cost = (line_length_factor *
|
||||
ds['average_distance'].to_pandas() *
|
||||
(underwater_fraction *
|
||||
costs.at[tech + '-connection-submarine', 'capital_cost'] +
|
||||
@ -302,8 +291,7 @@ def attach_wind_and_solar(n, costs):
|
||||
p_max_pu=ds['profile'].transpose('time', 'bus').to_pandas())
|
||||
|
||||
|
||||
def attach_conventional_generators(n, costs, ppl):
|
||||
carriers = snakemake.config['electricity']['conventional_carriers']
|
||||
def attach_conventional_generators(n, costs, ppl, carriers):
|
||||
|
||||
_add_missing_carriers_from_costs(n, costs, carriers)
|
||||
|
||||
@ -324,10 +312,7 @@ def attach_conventional_generators(n, costs, ppl):
|
||||
logger.warning(f'Capital costs for conventional generators put to 0 EUR/MW.')
|
||||
|
||||
|
||||
def attach_hydro(n, costs, ppl):
|
||||
if 'hydro' not in snakemake.config['renewable']: return
|
||||
c = snakemake.config['renewable']['hydro']
|
||||
carriers = c.get('carriers', ['ror', 'PHS', 'hydro'])
|
||||
def attach_hydro(n, costs, ppl, profile_hydro, hydro_capacities, carriers, **config):
|
||||
|
||||
_add_missing_carriers_from_costs(n, costs, carriers)
|
||||
|
||||
@ -343,11 +328,11 @@ def attach_hydro(n, costs, ppl):
|
||||
if not inflow_idx.empty:
|
||||
dist_key = ppl.loc[inflow_idx, 'p_nom'].groupby(country).transform(normed)
|
||||
|
||||
with xr.open_dataarray(snakemake.input.profile_hydro) as inflow:
|
||||
with xr.open_dataarray(profile_hydro) as inflow:
|
||||
inflow_countries = pd.Index(country[inflow_idx])
|
||||
missing_c = (inflow_countries.unique()
|
||||
.difference(inflow.indexes['countries']))
|
||||
assert missing_c.empty, (f"'{snakemake.input.profile_hydro}' is missing "
|
||||
assert missing_c.empty, (f"'{profile_hydro}' is missing "
|
||||
f"inflow time-series for at least one country: {', '.join(missing_c)}")
|
||||
|
||||
inflow_t = (inflow.sel(countries=inflow_countries)
|
||||
@ -372,7 +357,8 @@ def attach_hydro(n, costs, ppl):
|
||||
if 'PHS' in carriers and not phs.empty:
|
||||
# fill missing max hours to config value and
|
||||
# assume no natural inflow due to lack of data
|
||||
phs = phs.replace({'max_hours': {0: c['PHS_max_hours']}})
|
||||
max_hours = config.get('PHS_max_hours', 6)
|
||||
phs = phs.replace({'max_hours': {0: max_hours}})
|
||||
n.madd('StorageUnit', phs.index,
|
||||
carrier='PHS',
|
||||
bus=phs['bus'],
|
||||
@ -384,8 +370,11 @@ def attach_hydro(n, costs, ppl):
|
||||
cyclic_state_of_charge=True)
|
||||
|
||||
if 'hydro' in carriers and not hydro.empty:
|
||||
hydro_max_hours = c.get('hydro_max_hours')
|
||||
hydro_stats = pd.read_csv(snakemake.input.hydro_capacities,
|
||||
hydro_max_hours = config.get('hydro_max_hours')
|
||||
|
||||
assert hydro_max_hours is not None, "No path for hydro capacities given."
|
||||
|
||||
hydro_stats = pd.read_csv(hydro_capacities,
|
||||
comment="#", na_values='-', index_col=0)
|
||||
e_target = hydro_stats["E_store[TWh]"].clip(lower=0.2) * 1e6
|
||||
e_installed = hydro.eval('p_nom * max_hours').groupby(hydro.country).sum()
|
||||
@ -413,8 +402,7 @@ def attach_hydro(n, costs, ppl):
|
||||
bus=hydro['bus'],
|
||||
p_nom=hydro['p_nom'],
|
||||
max_hours=hydro_max_hours,
|
||||
capital_cost=(costs.at['hydro', 'capital_cost']
|
||||
if c.get('hydro_capital_cost') else 0.),
|
||||
capital_cost=costs.at['hydro', 'capital_cost'],
|
||||
marginal_cost=costs.at['hydro', 'marginal_cost'],
|
||||
p_max_pu=1., # dispatch
|
||||
p_min_pu=0., # store
|
||||
@ -424,9 +412,7 @@ def attach_hydro(n, costs, ppl):
|
||||
inflow=inflow_t.loc[:, hydro.index])
|
||||
|
||||
|
||||
def attach_extendable_generators(n, costs, ppl):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
carriers = pd.Index(elec_opts['extendable_carriers']['Generator'])
|
||||
def attach_extendable_generators(n, costs, ppl, carriers):
|
||||
|
||||
_add_missing_carriers_from_costs(n, costs, carriers)
|
||||
|
||||
@ -474,12 +460,11 @@ def attach_extendable_generators(n, costs, ppl):
|
||||
|
||||
|
||||
|
||||
def attach_OPSD_renewables(n):
|
||||
def attach_OPSD_renewables(n, techs):
|
||||
|
||||
available = ['DE', 'FR', 'PL', 'CH', 'DK', 'CZ', 'SE', 'GB']
|
||||
tech_map = {'Onshore': 'onwind', 'Offshore': 'offwind', 'Solar': 'solar'}
|
||||
countries = set(available) & set(n.buses.country)
|
||||
techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
|
||||
tech_map = {k: v for k, v in tech_map.items() if v in techs}
|
||||
|
||||
if not tech_map:
|
||||
@ -503,13 +488,11 @@ def attach_OPSD_renewables(n):
|
||||
caps = caps / gens_per_bus.reindex(caps.index, fill_value=1)
|
||||
|
||||
n.generators.p_nom.update(gens.bus.map(caps).dropna())
|
||||
n.generators.p_nom_min.update(gens.bus.map(caps).dropna())
|
||||
|
||||
|
||||
|
||||
def estimate_renewable_capacities(n, tech_map=None):
|
||||
if tech_map is None:
|
||||
tech_map = (snakemake.config['electricity']
|
||||
.get('estimate_renewable_capacities_from_capacity_stats', {}))
|
||||
def estimate_renewable_capacities(n, tech_map):
|
||||
|
||||
if len(tech_map) == 0: return
|
||||
|
||||
@ -538,10 +521,10 @@ def estimate_renewable_capacities(n, tech_map=None):
|
||||
.groupby(n.generators.bus.map(n.buses.country))
|
||||
.transform(lambda s: normed(s) * tech_capacities.at[s.name])
|
||||
.where(lambda s: s>0.1, 0.)) # only capacities above 100kW
|
||||
n.generators.loc[tech_i, 'p_nom_min'] = n.generators.loc[tech_i, 'p_nom']
|
||||
|
||||
|
||||
def add_nice_carrier_names(n, config=None):
|
||||
if config is None: config = snakemake.config
|
||||
def add_nice_carrier_names(n, config):
|
||||
carrier_i = n.carriers.index
|
||||
nice_names = (pd.Series(config['plotting']['nice_names'])
|
||||
.reindex(carrier_i).fillna(carrier_i.to_series().str.title()))
|
||||
@ -549,11 +532,9 @@ def add_nice_carrier_names(n, config=None):
|
||||
colors = pd.Series(config['plotting']['tech_colors']).reindex(carrier_i)
|
||||
if colors.isna().any():
|
||||
missing_i = list(colors.index[colors.isna()])
|
||||
logger.warning(f'tech_colors for carriers {missing_i} not defined '
|
||||
'in config.')
|
||||
logger.warning(f'tech_colors for carriers {missing_i} not defined in config.')
|
||||
n.carriers['color'] = colors
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
@ -561,23 +542,37 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
n = pypsa.Network(snakemake.input.base_network)
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
Nyears = n.snapshot_weightings.objective.sum() / 8760.
|
||||
|
||||
costs = load_costs(Nyears)
|
||||
ppl = load_powerplants()
|
||||
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
|
||||
ppl = load_powerplants(snakemake.input.powerplants)
|
||||
|
||||
attach_load(n)
|
||||
attach_load(n, snakemake.input.regions, snakemake.input.load, snakemake.input.nuts3_shapes,
|
||||
snakemake.config['countries'], snakemake.config['load']['scaling_factor'])
|
||||
|
||||
update_transmission_costs(n, costs)
|
||||
update_transmission_costs(n, costs, snakemake.config['lines']['length_factor'])
|
||||
|
||||
attach_conventional_generators(n, costs, ppl)
|
||||
attach_wind_and_solar(n, costs)
|
||||
attach_hydro(n, costs, ppl)
|
||||
attach_extendable_generators(n, costs, ppl)
|
||||
carriers = snakemake.config['electricity']['conventional_carriers']
|
||||
attach_conventional_generators(n, costs, ppl, carriers)
|
||||
|
||||
estimate_renewable_capacities(n)
|
||||
attach_OPSD_renewables(n)
|
||||
carriers = snakemake.config['renewable']
|
||||
attach_wind_and_solar(n, costs, snakemake.input, carriers, snakemake.config['lines']['length_factor'])
|
||||
|
||||
add_nice_carrier_names(n)
|
||||
if 'hydro' in snakemake.config['renewable']:
|
||||
carriers = snakemake.config['renewable']['hydro'].pop('carriers', [])
|
||||
attach_hydro(n, costs, ppl, snakemake.input.profile_hydro, snakemake.input.hydro_capacities,
|
||||
carriers, **snakemake.config['renewable']['hydro'])
|
||||
|
||||
carriers = snakemake.config['electricity']['extendable_carriers']['Generator']
|
||||
attach_extendable_generators(n, costs, ppl, carriers)
|
||||
|
||||
tech_map = snakemake.config['electricity'].get('estimate_renewable_capacities_from_capacity_stats', {})
|
||||
estimate_renewable_capacities(n, tech_map)
|
||||
techs = snakemake.config['electricity'].get('renewable_capacities_from_OPSD', [])
|
||||
attach_OPSD_renewables(n, techs)
|
||||
|
||||
update_p_nom_max(n)
|
||||
|
||||
add_nice_carrier_names(n, snakemake.config)
|
||||
|
||||
n.export_to_netcdf(snakemake.output[0])
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
@ -64,8 +64,7 @@ idx = pd.IndexSlice
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def attach_storageunits(n, costs):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
def attach_storageunits(n, costs, elec_opts):
|
||||
carriers = elec_opts['extendable_carriers']['StorageUnit']
|
||||
max_hours = elec_opts['max_hours']
|
||||
|
||||
@ -101,8 +100,7 @@ def attach_storageunits(n, costs):
|
||||
cyclic_state_of_charge=True)
|
||||
|
||||
|
||||
def attach_stores(n, costs):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
def attach_stores(n, costs, elec_opts):
|
||||
carriers = elec_opts['extendable_carriers']['Store']
|
||||
|
||||
_add_missing_carriers_from_costs(n, costs, carriers)
|
||||
@ -169,8 +167,7 @@ def attach_stores(n, costs):
|
||||
marginal_cost=costs.at["battery inverter", "marginal_cost"])
|
||||
|
||||
|
||||
def attach_hydrogen_pipelines(n, costs):
|
||||
elec_opts = snakemake.config['electricity']
|
||||
def attach_hydrogen_pipelines(n, costs, elec_opts):
|
||||
ext_carriers = elec_opts['extendable_carriers']
|
||||
as_stores = ext_carriers.get('Store', [])
|
||||
|
||||
@ -210,15 +207,15 @@ if __name__ == "__main__":
|
||||
configure_logging(snakemake)
|
||||
|
||||
n = pypsa.Network(snakemake.input.network)
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
costs = load_costs(Nyears, tech_costs=snakemake.input.tech_costs,
|
||||
config=snakemake.config['costs'],
|
||||
elec_config=snakemake.config['electricity'])
|
||||
elec_config = snakemake.config['electricity']
|
||||
|
||||
Nyears = n.snapshot_weightings.objective.sum() / 8760.
|
||||
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], elec_config, Nyears)
|
||||
|
||||
attach_storageunits(n, costs)
|
||||
attach_stores(n, costs)
|
||||
attach_hydrogen_pipelines(n, costs)
|
||||
attach_storageunits(n, costs, elec_config)
|
||||
attach_stores(n, costs, elec_config)
|
||||
attach_hydrogen_pipelines(n, costs, elec_config)
|
||||
|
||||
add_nice_carrier_names(n, config=snakemake.config)
|
||||
add_nice_carrier_names(n, snakemake.config)
|
||||
|
||||
n.export_to_netcdf(snakemake.output[0])
|
||||
|
@ -1,10 +1,10 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
Creates the network topology from a `ENTSO-E map extract <https://github.com/PyPSA/GridKit/tree/master/entsoe>`_ (January 2020) as a PyPSA network.
|
||||
Creates the network topology from a `ENTSO-E map extract <https://github.com/PyPSA/GridKit/tree/master/entsoe>`_ (March 2022) as a PyPSA network.
|
||||
|
||||
Relevant Settings
|
||||
-----------------
|
||||
@ -41,7 +41,7 @@ Relevant Settings
|
||||
Inputs
|
||||
------
|
||||
|
||||
- ``data/entsoegridkit``: Extract from the geographical vector data of the online `ENTSO-E Interactive Map <https://www.entsoe.eu/data/map/>`_ by the `GridKit <https://github.com/pypsa/gridkit>`_ toolkit dating back to January 2020.
|
||||
- ``data/entsoegridkit``: Extract from the geographical vector data of the online `ENTSO-E Interactive Map <https://www.entsoe.eu/data/map/>`_ by the `GridKit <https://github.com/martacki/gridkit>`_ toolkit dating back to March 2022.
|
||||
- ``data/parameter_corrections.yaml``: Corrections for ``data/entsoegridkit``
|
||||
- ``data/links_p_nom.csv``: confer :ref:`links`
|
||||
- ``data/links_tyndp.csv``: List of projects in the `TYNDP 2018 <https://tyndp.entsoe.eu/tyndp2018/>`_ that are at least *in permitting* with fields for start- and endpoint (names and coordinates), length, capacity, construction status, and project reference ID.
|
||||
@ -70,11 +70,10 @@ import yaml
|
||||
import pandas as pd
|
||||
import geopandas as gpd
|
||||
import numpy as np
|
||||
import scipy as sp
|
||||
import networkx as nx
|
||||
|
||||
from scipy import spatial
|
||||
from scipy.sparse import csgraph
|
||||
from six import iteritems
|
||||
from itertools import product
|
||||
|
||||
from shapely.geometry import Point, LineString
|
||||
@ -98,11 +97,11 @@ def _get_country(df):
|
||||
|
||||
|
||||
def _find_closest_links(links, new_links, distance_upper_bound=1.5):
|
||||
treecoords = np.asarray([np.asarray(shapely.wkt.loads(s))[[0, -1]].flatten()
|
||||
treecoords = np.asarray([np.asarray(shapely.wkt.loads(s).coords)[[0, -1]].flatten()
|
||||
for s in links.geometry])
|
||||
querycoords = np.vstack([new_links[['x1', 'y1', 'x2', 'y2']],
|
||||
new_links[['x2', 'y2', 'x1', 'y1']]])
|
||||
tree = sp.spatial.KDTree(treecoords)
|
||||
tree = spatial.KDTree(treecoords)
|
||||
dist, ind = tree.query(querycoords, distance_upper_bound=distance_upper_bound)
|
||||
found_b = ind < len(links)
|
||||
found_i = np.arange(len(new_links)*2)[found_b] % len(new_links)
|
||||
@ -113,9 +112,9 @@ def _find_closest_links(links, new_links, distance_upper_bound=1.5):
|
||||
.sort_index()['i']
|
||||
|
||||
|
||||
def _load_buses_from_eg():
|
||||
buses = (pd.read_csv(snakemake.input.eg_buses, quotechar="'",
|
||||
true_values='t', false_values='f',
|
||||
def _load_buses_from_eg(eg_buses, europe_shape, config_elec):
|
||||
buses = (pd.read_csv(eg_buses, quotechar="'",
|
||||
true_values=['t'], false_values=['f'],
|
||||
dtype=dict(bus_id="str"))
|
||||
.set_index("bus_id")
|
||||
.drop(['station_id'], axis=1)
|
||||
@ -125,19 +124,19 @@ def _load_buses_from_eg():
|
||||
buses['under_construction'] = buses['under_construction'].fillna(False).astype(bool)
|
||||
|
||||
# remove all buses outside of all countries including exclusive economic zones (offshore)
|
||||
europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry']
|
||||
europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry']
|
||||
europe_shape_prepped = shapely.prepared.prep(europe_shape)
|
||||
buses_in_europe_b = buses[['x', 'y']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1)
|
||||
|
||||
buses_with_v_nom_to_keep_b = buses.v_nom.isin(snakemake.config['electricity']['voltages']) | buses.v_nom.isnull()
|
||||
logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(snakemake.config['electricity']['voltages'])))
|
||||
buses_with_v_nom_to_keep_b = buses.v_nom.isin(config_elec['voltages']) | buses.v_nom.isnull()
|
||||
logger.info("Removing buses with voltages {}".format(pd.Index(buses.v_nom.unique()).dropna().difference(config_elec['voltages'])))
|
||||
|
||||
return pd.DataFrame(buses.loc[buses_in_europe_b & buses_with_v_nom_to_keep_b])
|
||||
|
||||
|
||||
def _load_transformers_from_eg(buses):
|
||||
transformers = (pd.read_csv(snakemake.input.eg_transformers, quotechar="'",
|
||||
true_values='t', false_values='f',
|
||||
def _load_transformers_from_eg(buses, eg_transformers):
|
||||
transformers = (pd.read_csv(eg_transformers, quotechar="'",
|
||||
true_values=['t'], false_values=['f'],
|
||||
dtype=dict(transformer_id='str', bus0='str', bus1='str'))
|
||||
.set_index('transformer_id'))
|
||||
|
||||
@ -146,9 +145,9 @@ def _load_transformers_from_eg(buses):
|
||||
return transformers
|
||||
|
||||
|
||||
def _load_converters_from_eg(buses):
|
||||
converters = (pd.read_csv(snakemake.input.eg_converters, quotechar="'",
|
||||
true_values='t', false_values='f',
|
||||
def _load_converters_from_eg(buses, eg_converters):
|
||||
converters = (pd.read_csv(eg_converters, quotechar="'",
|
||||
true_values=['t'], false_values=['f'],
|
||||
dtype=dict(converter_id='str', bus0='str', bus1='str'))
|
||||
.set_index('converter_id'))
|
||||
|
||||
@ -159,15 +158,16 @@ def _load_converters_from_eg(buses):
|
||||
return converters
|
||||
|
||||
|
||||
def _load_links_from_eg(buses):
|
||||
links = (pd.read_csv(snakemake.input.eg_links, quotechar="'", true_values='t', false_values='f',
|
||||
def _load_links_from_eg(buses, eg_links):
|
||||
links = (pd.read_csv(eg_links, quotechar="'", true_values=['t'], false_values=['f'],
|
||||
dtype=dict(link_id='str', bus0='str', bus1='str', under_construction="bool"))
|
||||
.set_index('link_id'))
|
||||
|
||||
links['length'] /= 1e3
|
||||
|
||||
# hotfix
|
||||
links.loc[links.bus1=='6271', 'bus1'] = '6273'
|
||||
# Skagerrak Link is connected to 132kV bus which is removed in _load_buses_from_eg.
|
||||
# Connect to neighboring 380kV bus
|
||||
links.loc[links.bus1=='6396', 'bus1'] = '6398'
|
||||
|
||||
links = _remove_dangling_branches(links, buses)
|
||||
|
||||
@ -177,11 +177,11 @@ def _load_links_from_eg(buses):
|
||||
return links
|
||||
|
||||
|
||||
def _add_links_from_tyndp(buses, links):
|
||||
links_tyndp = pd.read_csv(snakemake.input.links_tyndp)
|
||||
def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape):
|
||||
links_tyndp = pd.read_csv(links_tyndp)
|
||||
|
||||
# remove all links from list which lie outside all of the desired countries
|
||||
europe_shape = gpd.read_file(snakemake.input.europe_shape).loc[0, 'geometry']
|
||||
europe_shape = gpd.read_file(europe_shape).loc[0, 'geometry']
|
||||
europe_shape_prepped = shapely.prepared.prep(europe_shape)
|
||||
x1y1_in_europe_b = links_tyndp[['x1', 'y1']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1)
|
||||
x2y2_in_europe_b = links_tyndp[['x2', 'y2']].apply(lambda p: europe_shape_prepped.contains(Point(p)), axis=1)
|
||||
@ -213,8 +213,9 @@ def _add_links_from_tyndp(buses, links):
|
||||
if links_tyndp["j"].notnull().any():
|
||||
logger.info("TYNDP links already in the dataset (skipping): " + ", ".join(links_tyndp.loc[links_tyndp["j"].notnull(), "Name"]))
|
||||
links_tyndp = links_tyndp.loc[links_tyndp["j"].isnull()]
|
||||
if links_tyndp.empty: return buses, links
|
||||
|
||||
tree = sp.spatial.KDTree(buses[['x', 'y']])
|
||||
tree = spatial.KDTree(buses[['x', 'y']])
|
||||
_, ind0 = tree.query(links_tyndp[["x1", "y1"]])
|
||||
ind0_b = ind0 < len(buses)
|
||||
links_tyndp.loc[ind0_b, "bus0"] = buses.index[ind0[ind0_b]]
|
||||
@ -245,11 +246,13 @@ def _add_links_from_tyndp(buses, links):
|
||||
|
||||
links_tyndp.index = "T" + links_tyndp.index.astype(str)
|
||||
|
||||
return buses, links.append(links_tyndp, sort=True)
|
||||
links = pd.concat([links, links_tyndp], sort=True)
|
||||
|
||||
return buses, links
|
||||
|
||||
|
||||
def _load_lines_from_eg(buses):
|
||||
lines = (pd.read_csv(snakemake.input.eg_lines, quotechar="'", true_values='t', false_values='f',
|
||||
def _load_lines_from_eg(buses, eg_lines):
|
||||
lines = (pd.read_csv(eg_lines, quotechar="'", true_values=['t'], false_values=['f'],
|
||||
dtype=dict(line_id='str', bus0='str', bus1='str',
|
||||
underground="bool", under_construction="bool"))
|
||||
.set_index('line_id')
|
||||
@ -262,19 +265,19 @@ def _load_lines_from_eg(buses):
|
||||
return lines
|
||||
|
||||
|
||||
def _apply_parameter_corrections(n):
|
||||
with open(snakemake.input.parameter_corrections) as f:
|
||||
def _apply_parameter_corrections(n, parameter_corrections):
|
||||
with open(parameter_corrections) as f:
|
||||
corrections = yaml.safe_load(f)
|
||||
|
||||
if corrections is None: return
|
||||
|
||||
for component, attrs in iteritems(corrections):
|
||||
for component, attrs in corrections.items():
|
||||
df = n.df(component)
|
||||
oid = _get_oid(df)
|
||||
if attrs is None: continue
|
||||
|
||||
for attr, repls in iteritems(attrs):
|
||||
for i, r in iteritems(repls):
|
||||
for attr, repls in attrs.items():
|
||||
for i, r in repls.items():
|
||||
if i == 'oid':
|
||||
r = oid.map(repls["oid"]).dropna()
|
||||
elif i == 'index':
|
||||
@ -285,14 +288,14 @@ def _apply_parameter_corrections(n):
|
||||
df.loc[inds, attr] = r[inds].astype(df[attr].dtype)
|
||||
|
||||
|
||||
def _set_electrical_parameters_lines(lines):
|
||||
v_noms = snakemake.config['electricity']['voltages']
|
||||
linetypes = snakemake.config['lines']['types']
|
||||
def _set_electrical_parameters_lines(lines, config):
|
||||
v_noms = config['electricity']['voltages']
|
||||
linetypes = config['lines']['types']
|
||||
|
||||
for v_nom in v_noms:
|
||||
lines.loc[lines["v_nom"] == v_nom, 'type'] = linetypes[v_nom]
|
||||
|
||||
lines['s_max_pu'] = snakemake.config['lines']['s_max_pu']
|
||||
lines['s_max_pu'] = config['lines']['s_max_pu']
|
||||
|
||||
return lines
|
||||
|
||||
@ -304,14 +307,14 @@ def _set_lines_s_nom_from_linetypes(n):
|
||||
)
|
||||
|
||||
|
||||
def _set_electrical_parameters_links(links):
|
||||
def _set_electrical_parameters_links(links, config, links_p_nom):
|
||||
if links.empty: return links
|
||||
|
||||
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
|
||||
p_max_pu = config['links'].get('p_max_pu', 1.)
|
||||
links['p_max_pu'] = p_max_pu
|
||||
links['p_min_pu'] = -p_max_pu
|
||||
|
||||
links_p_nom = pd.read_csv(snakemake.input.links_p_nom)
|
||||
links_p_nom = pd.read_csv(links_p_nom)
|
||||
|
||||
# filter links that are not in operation anymore
|
||||
removed_b = links_p_nom.Remarks.str.contains('Shut down|Replaced', na=False)
|
||||
@ -331,8 +334,8 @@ def _set_electrical_parameters_links(links):
|
||||
return links
|
||||
|
||||
|
||||
def _set_electrical_parameters_converters(converters):
|
||||
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
|
||||
def _set_electrical_parameters_converters(converters, config):
|
||||
p_max_pu = config['links'].get('p_max_pu', 1.)
|
||||
converters['p_max_pu'] = p_max_pu
|
||||
converters['p_min_pu'] = -p_max_pu
|
||||
|
||||
@ -345,8 +348,8 @@ def _set_electrical_parameters_converters(converters):
|
||||
return converters
|
||||
|
||||
|
||||
def _set_electrical_parameters_transformers(transformers):
|
||||
config = snakemake.config['transformers']
|
||||
def _set_electrical_parameters_transformers(transformers, config):
|
||||
config = config['transformers']
|
||||
|
||||
## Add transformer parameters
|
||||
transformers["x"] = config.get('x', 0.1)
|
||||
@ -373,7 +376,7 @@ def _remove_unconnected_components(network):
|
||||
return network[component == component_sizes.index[0]]
|
||||
|
||||
|
||||
def _set_countries_and_substations(n):
|
||||
def _set_countries_and_substations(n, config, country_shapes, offshore_shapes):
|
||||
|
||||
buses = n.buses
|
||||
|
||||
@ -386,9 +389,9 @@ def _set_countries_and_substations(n):
|
||||
index=buses.index
|
||||
)
|
||||
|
||||
countries = snakemake.config['countries']
|
||||
country_shapes = gpd.read_file(snakemake.input.country_shapes).set_index('name')['geometry']
|
||||
offshore_shapes = gpd.read_file(snakemake.input.offshore_shapes).set_index('name')['geometry']
|
||||
countries = config['countries']
|
||||
country_shapes = gpd.read_file(country_shapes).set_index('name')['geometry']
|
||||
offshore_shapes = gpd.read_file(offshore_shapes).set_index('name')['geometry']
|
||||
substation_b = buses['symbol'].str.contains('substation|converter station', case=False)
|
||||
|
||||
def prefer_voltage(x, which):
|
||||
@ -498,19 +501,19 @@ def _replace_b2b_converter_at_country_border_by_link(n):
|
||||
.format(i, b0, line, linkcntry.at[i], buscntry.at[b1]))
|
||||
|
||||
|
||||
def _set_links_underwater_fraction(n):
|
||||
def _set_links_underwater_fraction(n, offshore_shapes):
|
||||
if n.links.empty: return
|
||||
|
||||
if not hasattr(n.links, 'geometry'):
|
||||
n.links['underwater_fraction'] = 0.
|
||||
else:
|
||||
offshore_shape = gpd.read_file(snakemake.input.offshore_shapes).unary_union
|
||||
offshore_shape = gpd.read_file(offshore_shapes).unary_union
|
||||
links = gpd.GeoSeries(n.links.geometry.dropna().map(shapely.wkt.loads))
|
||||
n.links['underwater_fraction'] = links.intersection(offshore_shape).length / links.length
|
||||
|
||||
|
||||
def _adjust_capacities_of_under_construction_branches(n):
|
||||
lines_mode = snakemake.config['lines'].get('under_construction', 'undef')
|
||||
def _adjust_capacities_of_under_construction_branches(n, config):
|
||||
lines_mode = config['lines'].get('under_construction', 'undef')
|
||||
if lines_mode == 'zero':
|
||||
n.lines.loc[n.lines.under_construction, 'num_parallel'] = 0.
|
||||
n.lines.loc[n.lines.under_construction, 's_nom'] = 0.
|
||||
@ -519,7 +522,7 @@ def _adjust_capacities_of_under_construction_branches(n):
|
||||
elif lines_mode != 'keep':
|
||||
logger.warning("Unrecognized configuration for `lines: under_construction` = `{}`. Keeping under construction lines.")
|
||||
|
||||
links_mode = snakemake.config['links'].get('under_construction', 'undef')
|
||||
links_mode = config['links'].get('under_construction', 'undef')
|
||||
if links_mode == 'zero':
|
||||
n.links.loc[n.links.under_construction, "p_nom"] = 0.
|
||||
elif links_mode == 'remove':
|
||||
@ -534,28 +537,30 @@ def _adjust_capacities_of_under_construction_branches(n):
|
||||
return n
|
||||
|
||||
|
||||
def base_network():
|
||||
buses = _load_buses_from_eg()
|
||||
def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links,
|
||||
links_p_nom, links_tyndp, europe_shape, country_shapes, offshore_shapes,
|
||||
parameter_corrections, config):
|
||||
|
||||
links = _load_links_from_eg(buses)
|
||||
if snakemake.config['links'].get('include_tyndp'):
|
||||
buses, links = _add_links_from_tyndp(buses, links)
|
||||
buses = _load_buses_from_eg(eg_buses, europe_shape, config['electricity'])
|
||||
|
||||
converters = _load_converters_from_eg(buses)
|
||||
links = _load_links_from_eg(buses, eg_links)
|
||||
if config['links'].get('include_tyndp'):
|
||||
buses, links = _add_links_from_tyndp(buses, links, links_tyndp, europe_shape)
|
||||
|
||||
lines = _load_lines_from_eg(buses)
|
||||
transformers = _load_transformers_from_eg(buses)
|
||||
converters = _load_converters_from_eg(buses, eg_converters)
|
||||
|
||||
lines = _set_electrical_parameters_lines(lines)
|
||||
transformers = _set_electrical_parameters_transformers(transformers)
|
||||
links = _set_electrical_parameters_links(links)
|
||||
converters = _set_electrical_parameters_converters(converters)
|
||||
lines = _load_lines_from_eg(buses, eg_lines)
|
||||
transformers = _load_transformers_from_eg(buses, eg_transformers)
|
||||
|
||||
lines = _set_electrical_parameters_lines(lines, config)
|
||||
transformers = _set_electrical_parameters_transformers(transformers, config)
|
||||
links = _set_electrical_parameters_links(links, config, links_p_nom)
|
||||
converters = _set_electrical_parameters_converters(converters, config)
|
||||
|
||||
n = pypsa.Network()
|
||||
n.name = 'PyPSA-Eur'
|
||||
|
||||
n.set_snapshots(pd.date_range(freq='h', **snakemake.config['snapshots']))
|
||||
n.snapshot_weightings[:] *= 8760. / n.snapshot_weightings.sum()
|
||||
n.set_snapshots(pd.date_range(freq='h', **config['snapshots']))
|
||||
|
||||
n.import_components_from_dataframe(buses, "Bus")
|
||||
n.import_components_from_dataframe(lines, "Line")
|
||||
@ -565,17 +570,17 @@ def base_network():
|
||||
|
||||
_set_lines_s_nom_from_linetypes(n)
|
||||
|
||||
_apply_parameter_corrections(n)
|
||||
_apply_parameter_corrections(n, parameter_corrections)
|
||||
|
||||
n = _remove_unconnected_components(n)
|
||||
|
||||
_set_countries_and_substations(n)
|
||||
_set_countries_and_substations(n, config, country_shapes, offshore_shapes)
|
||||
|
||||
_set_links_underwater_fraction(n)
|
||||
_set_links_underwater_fraction(n, offshore_shapes)
|
||||
|
||||
_replace_b2b_converter_at_country_border_by_link(n)
|
||||
|
||||
n = _adjust_capacities_of_under_construction_branches(n)
|
||||
n = _adjust_capacities_of_under_construction_branches(n, config)
|
||||
|
||||
return n
|
||||
|
||||
@ -585,6 +590,8 @@ if __name__ == "__main__":
|
||||
snakemake = mock_snakemake('base_network')
|
||||
configure_logging(snakemake)
|
||||
|
||||
n = base_network()
|
||||
n = base_network(snakemake.input.eg_buses, snakemake.input.eg_converters, snakemake.input.eg_transformers, snakemake.input.eg_lines, snakemake.input.eg_links,
|
||||
snakemake.input.links_p_nom, snakemake.input.links_tyndp, snakemake.input.europe_shape, snakemake.input.country_shapes, snakemake.input.offshore_shapes,
|
||||
snakemake.input.parameter_corrections, snakemake.config)
|
||||
|
||||
n.export_to_netcdf(snakemake.output[0])
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Creates Voronoi shapes for each bus representing both onshore and offshore regions.
|
||||
@ -47,9 +47,10 @@ from _helpers import configure_logging
|
||||
import pypsa
|
||||
import os
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
import geopandas as gpd
|
||||
|
||||
from vresutils.graph import voronoi_partition_pts
|
||||
from shapely.geometry import Polygon
|
||||
from scipy.spatial import Voronoi
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -61,6 +62,53 @@ def save_to_geojson(s, fn):
|
||||
s.to_file(fn, driver='GeoJSON', schema=schema)
|
||||
|
||||
|
||||
def voronoi_partition_pts(points, outline):
|
||||
"""
|
||||
Compute the polygons of a voronoi partition of `points` within the
|
||||
polygon `outline`. Taken from
|
||||
https://github.com/FRESNA/vresutils/blob/master/vresutils/graph.py
|
||||
Attributes
|
||||
----------
|
||||
points : Nx2 - ndarray[dtype=float]
|
||||
outline : Polygon
|
||||
Returns
|
||||
-------
|
||||
polygons : N - ndarray[dtype=Polygon|MultiPolygon]
|
||||
"""
|
||||
|
||||
points = np.asarray(points)
|
||||
|
||||
if len(points) == 1:
|
||||
polygons = [outline]
|
||||
else:
|
||||
xmin, ymin = np.amin(points, axis=0)
|
||||
xmax, ymax = np.amax(points, axis=0)
|
||||
xspan = xmax - xmin
|
||||
yspan = ymax - ymin
|
||||
|
||||
# to avoid any network positions outside all Voronoi cells, append
|
||||
# the corners of a rectangle framing these points
|
||||
vor = Voronoi(np.vstack((points,
|
||||
[[xmin-3.*xspan, ymin-3.*yspan],
|
||||
[xmin-3.*xspan, ymax+3.*yspan],
|
||||
[xmax+3.*xspan, ymin-3.*yspan],
|
||||
[xmax+3.*xspan, ymax+3.*yspan]])))
|
||||
|
||||
polygons = []
|
||||
for i in range(len(points)):
|
||||
poly = Polygon(vor.vertices[vor.regions[vor.point_region[i]]])
|
||||
|
||||
if not poly.is_valid:
|
||||
poly = poly.buffer(0)
|
||||
|
||||
poly = poly.intersection(outline)
|
||||
|
||||
polygons.append(poly)
|
||||
|
||||
|
||||
return np.array(polygons, dtype=object)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2021 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Create cutouts with `atlite <https://atlite.readthedocs.io/en/latest/>`_.
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Build hydroelectric inflow time-series for each country.
|
||||
@ -74,7 +74,7 @@ if __name__ == "__main__":
|
||||
snakemake = mock_snakemake('build_hydro_profile')
|
||||
configure_logging(snakemake)
|
||||
|
||||
config = snakemake.config['renewable']['hydro']
|
||||
config_hydro = snakemake.config['renewable']['hydro']
|
||||
cutout = atlite.Cutout(snakemake.input.cutout)
|
||||
|
||||
countries = snakemake.config['countries']
|
||||
@ -89,7 +89,7 @@ if __name__ == "__main__":
|
||||
lower_threshold_quantile=True,
|
||||
normalize_using_yearly=eia_stats)
|
||||
|
||||
if 'clip_min_inflow' in config:
|
||||
inflow = inflow.where(inflow > config['clip_min_inflow'], 0)
|
||||
if 'clip_min_inflow' in config_hydro:
|
||||
inflow = inflow.where(inflow > config_hydro['clip_min_inflow'], 0)
|
||||
|
||||
inflow.to_netcdf(snakemake.output[0])
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2020 @JanFrederickUnnewehr, The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
|
||||
@ -14,7 +14,6 @@ Relevant Settings
|
||||
snapshots:
|
||||
|
||||
load:
|
||||
url:
|
||||
interpolate_limit:
|
||||
time_shift_for_large_gaps:
|
||||
manual_adjustments:
|
||||
@ -71,7 +70,7 @@ def load_timeseries(fn, years, countries, powerstatistics=True):
|
||||
"""
|
||||
logger.info(f"Retrieving load data from '{fn}'.")
|
||||
|
||||
pattern = 'power_statistics' if powerstatistics else '_transparency'
|
||||
pattern = 'power_statistics' if powerstatistics else 'transparency'
|
||||
pattern = f'_load_actual_entsoe_{pattern}'
|
||||
rename = lambda s: s[:-len(pattern)]
|
||||
date_parser = lambda x: dateutil.parser.parse(x, ignoretz=True)
|
||||
@ -117,14 +116,19 @@ def nan_statistics(df):
|
||||
keys=['total', 'consecutive', 'max_total_per_month'], axis=1)
|
||||
|
||||
|
||||
def copy_timeslice(load, cntry, start, stop, delta):
|
||||
def copy_timeslice(load, cntry, start, stop, delta, fn_load=None):
|
||||
start = pd.Timestamp(start)
|
||||
stop = pd.Timestamp(stop)
|
||||
if start-delta in load.index and stop in load.index and cntry in load:
|
||||
load.loc[start:stop, cntry] = load.loc[start-delta:stop-delta, cntry].values
|
||||
if (start in load.index and stop in load.index):
|
||||
if start-delta in load.index and stop-delta in load.index and cntry in load:
|
||||
load.loc[start:stop, cntry] = load.loc[start-delta:stop-delta, cntry].values
|
||||
elif fn_load is not None:
|
||||
duration = pd.date_range(freq='h', start=start-delta, end=stop-delta)
|
||||
load_raw = load_timeseries(fn_load, duration, [cntry], powerstatistics)
|
||||
load.loc[start:stop, cntry] = load_raw.loc[start-delta:stop-delta, cntry].values
|
||||
|
||||
|
||||
def manual_adjustment(load, powerstatistics):
|
||||
def manual_adjustment(load, fn_load, powerstatistics):
|
||||
"""
|
||||
Adjust gaps manual for load data from OPSD time-series package.
|
||||
|
||||
@ -151,6 +155,8 @@ def manual_adjustment(load, powerstatistics):
|
||||
powerstatistics: bool
|
||||
Whether argument load comprises the electricity consumption data of
|
||||
the ENTSOE power statistics or of the ENTSOE transparency map
|
||||
load_fn: str
|
||||
File name or url location (file format .csv)
|
||||
|
||||
Returns
|
||||
-------
|
||||
@ -176,7 +182,11 @@ def manual_adjustment(load, powerstatistics):
|
||||
copy_timeslice(load, 'CH', '2010-11-04 04:00', '2010-11-04 22:00', Delta(days=1))
|
||||
copy_timeslice(load, 'NO', '2010-12-09 11:00', '2010-12-09 18:00', Delta(days=1))
|
||||
# whole january missing
|
||||
copy_timeslice(load, 'GB', '2009-12-31 23:00', '2010-01-31 23:00', Delta(days=-364))
|
||||
copy_timeslice(load, 'GB', '2010-01-01 00:00', '2010-01-31 23:00', Delta(days=-365), fn_load)
|
||||
# 1.1. at midnight gets special treatment
|
||||
copy_timeslice(load, 'IE', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
|
||||
copy_timeslice(load, 'PT', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
|
||||
copy_timeslice(load, 'GB', '2016-01-01 00:00', '2016-01-01 01:00', Delta(days=-366), fn_load)
|
||||
|
||||
else:
|
||||
if 'ME' in load:
|
||||
@ -197,19 +207,17 @@ if __name__ == "__main__":
|
||||
|
||||
configure_logging(snakemake)
|
||||
|
||||
config = snakemake.config
|
||||
powerstatistics = config['load']['power_statistics']
|
||||
url = config['load']['url']
|
||||
interpolate_limit = config['load']['interpolate_limit']
|
||||
countries = config['countries']
|
||||
snapshots = pd.date_range(freq='h', **config['snapshots'])
|
||||
powerstatistics = snakemake.config['load']['power_statistics']
|
||||
interpolate_limit = snakemake.config['load']['interpolate_limit']
|
||||
countries = snakemake.config['countries']
|
||||
snapshots = pd.date_range(freq='h', **snakemake.config['snapshots'])
|
||||
years = slice(snapshots[0], snapshots[-1])
|
||||
time_shift = config['load']['time_shift_for_large_gaps']
|
||||
time_shift = snakemake.config['load']['time_shift_for_large_gaps']
|
||||
|
||||
load = load_timeseries(url, years, countries, powerstatistics)
|
||||
load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
|
||||
|
||||
if config['load']['manual_adjustments']:
|
||||
load = manual_adjustment(load, powerstatistics)
|
||||
if snakemake.config['load']['manual_adjustments']:
|
||||
load = manual_adjustment(load, snakemake.input[0], powerstatistics)
|
||||
|
||||
logger.info(f"Linearly interpolate gaps of size {interpolate_limit} and less.")
|
||||
load = load.interpolate(method='linear', limit=interpolate_limit)
|
||||
|
@ -1,91 +0,0 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
"""
|
||||
Rasters the vector data of the `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas onto all cutout regions.
|
||||
|
||||
Relevant Settings
|
||||
-----------------
|
||||
|
||||
.. code:: yaml
|
||||
|
||||
renewable:
|
||||
{technology}:
|
||||
cutout:
|
||||
|
||||
.. seealso::
|
||||
Documentation of the configuration file ``config.yaml`` at
|
||||
:ref:`renewable_cf`
|
||||
|
||||
Inputs
|
||||
------
|
||||
|
||||
- ``data/bundle/natura/Natura2000_end2015.shp``: `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas.
|
||||
|
||||
.. image:: ../img/natura.png
|
||||
:scale: 33 %
|
||||
|
||||
Outputs
|
||||
-------
|
||||
|
||||
- ``resources/natura.tiff``: Rasterized version of `Natura 2000 <https://en.wikipedia.org/wiki/Natura_2000>`_ natural protection areas to reduce computation times.
|
||||
|
||||
.. image:: ../img/natura.png
|
||||
:scale: 33 %
|
||||
|
||||
Description
|
||||
-----------
|
||||
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging
|
||||
|
||||
import atlite
|
||||
import geopandas as gpd
|
||||
import rasterio as rio
|
||||
from rasterio.features import geometry_mask
|
||||
from rasterio.warp import transform_bounds
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def determine_cutout_xXyY(cutout_name):
|
||||
cutout = atlite.Cutout(cutout_name)
|
||||
assert cutout.crs.to_epsg() == 4326
|
||||
x, X, y, Y = cutout.extent
|
||||
dx, dy = cutout.dx, cutout.dy
|
||||
return [x - dx/2., X + dx/2., y - dy/2., Y + dy/2.]
|
||||
|
||||
|
||||
def get_transform_and_shape(bounds, res):
|
||||
left, bottom = [(b // res)* res for b in bounds[:2]]
|
||||
right, top = [(b // res + 1) * res for b in bounds[2:]]
|
||||
shape = int((top - bottom) // res), int((right - left) / res)
|
||||
transform = rio.Affine(res, 0, left, 0, -res, top)
|
||||
return transform, shape
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
if 'snakemake' not in globals():
|
||||
from _helpers import mock_snakemake
|
||||
snakemake = mock_snakemake('build_natura_raster')
|
||||
configure_logging(snakemake)
|
||||
|
||||
|
||||
cutouts = snakemake.input.cutouts
|
||||
xs, Xs, ys, Ys = zip(*(determine_cutout_xXyY(cutout) for cutout in cutouts))
|
||||
bounds = transform_bounds(4326, 3035, min(xs), min(ys), max(Xs), max(Ys))
|
||||
transform, out_shape = get_transform_and_shape(bounds, res=100)
|
||||
|
||||
# adjusted boundaries
|
||||
shapes = gpd.read_file(snakemake.input.natura).to_crs(3035)
|
||||
raster = ~geometry_mask(shapes.geometry, out_shape[::-1], transform)
|
||||
raster = raster.astype(rio.uint8)
|
||||
|
||||
with rio.open(snakemake.output[0], 'w', driver='GTiff', dtype=rio.uint8,
|
||||
count=1, transform=transform, crs=3035, compress='lzw',
|
||||
width=raster.shape[1], height=raster.shape[0]) as dst:
|
||||
dst.write(raster, indexes=1)
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
@ -84,15 +84,14 @@ from scipy.spatial import cKDTree as KDTree
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def add_custom_powerplants(ppl):
|
||||
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
|
||||
def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False):
|
||||
if not custom_ppl_query:
|
||||
return ppl
|
||||
add_ppls = pd.read_csv(snakemake.input.custom_powerplants, index_col=0,
|
||||
add_ppls = pd.read_csv(custom_powerplants, index_col=0,
|
||||
dtype={'bus': 'str'})
|
||||
if isinstance(custom_ppl_query, str):
|
||||
add_ppls.query(custom_ppl_query, inplace=True)
|
||||
return ppl.append(add_ppls, sort=False, ignore_index=True, verify_integrity=True)
|
||||
return pd.concat([ppl, add_ppls], sort=False, ignore_index=True, verify_integrity=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
@ -119,7 +118,9 @@ if __name__ == "__main__":
|
||||
if isinstance(ppl_query, str):
|
||||
ppl.query(ppl_query, inplace=True)
|
||||
|
||||
ppl = add_custom_powerplants(ppl) # add carriers from own powerplant files
|
||||
# add carriers from own powerplant files:
|
||||
custom_ppl_query = snakemake.config['electricity']['custom_powerplants']
|
||||
ppl = add_custom_powerplants(ppl, snakemake.input.custom_powerplants, custom_ppl_query)
|
||||
|
||||
cntries_without_ppl = [c for c in countries if c not in ppl.Country.unique()]
|
||||
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""Calculates for each network node the
|
||||
(i) installable capacity (based on land-use), (ii) the available generation time
|
||||
@ -183,11 +183,13 @@ import progressbar as pgb
|
||||
import geopandas as gpd
|
||||
import xarray as xr
|
||||
import numpy as np
|
||||
import functools
|
||||
import atlite
|
||||
import logging
|
||||
from pypsa.geo import haversine
|
||||
from shapely.geometry import LineString
|
||||
import time
|
||||
from dask.distributed import Client, LocalCluster
|
||||
|
||||
from _helpers import configure_logging
|
||||
|
||||
@ -200,9 +202,9 @@ if __name__ == '__main__':
|
||||
snakemake = mock_snakemake('build_renewable_profiles', technology='solar')
|
||||
configure_logging(snakemake)
|
||||
pgb.streams.wrap_stderr()
|
||||
paths = snakemake.input
|
||||
nprocesses = snakemake.config['atlite'].get('nprocesses')
|
||||
noprogress = not snakemake.config['atlite'].get('show_progress', True)
|
||||
|
||||
nprocesses = int(snakemake.threads)
|
||||
noprogress = not snakemake.config['atlite'].get('show_progress', False)
|
||||
config = snakemake.config['renewable'][snakemake.wildcards.technology]
|
||||
resource = config['resource'] # pv panel config / wind turbine config
|
||||
correction_factor = config.get('correction_factor', 1.)
|
||||
@ -215,36 +217,43 @@ if __name__ == '__main__':
|
||||
if correction_factor != 1.:
|
||||
logger.info(f'correction_factor is set as {correction_factor}')
|
||||
|
||||
|
||||
cutout = atlite.Cutout(paths['cutout'])
|
||||
regions = gpd.read_file(paths.regions).set_index('name').rename_axis('bus')
|
||||
cluster = LocalCluster(n_workers=nprocesses, threads_per_worker=1)
|
||||
client = Client(cluster, asynchronous=True)
|
||||
|
||||
cutout = atlite.Cutout(snakemake.input['cutout'])
|
||||
regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')
|
||||
buses = regions.index
|
||||
|
||||
excluder = atlite.ExclusionContainer(crs=3035, res=100)
|
||||
|
||||
if config['natura']:
|
||||
excluder.add_raster(paths.natura, nodata=0, allow_no_overlap=True)
|
||||
mask = regions.to_crs(3035).buffer(0) # buffer to avoid invalid geometry
|
||||
natura = gpd.read_file(snakemake.input.natura, mask=mask)
|
||||
excluder.add_geometry(natura.geometry)
|
||||
|
||||
corine = config.get("corine", {})
|
||||
if "grid_codes" in corine:
|
||||
codes = corine["grid_codes"]
|
||||
excluder.add_raster(paths.corine, codes=codes, invert=True, crs=3035)
|
||||
excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
|
||||
if corine.get("distance", 0.) > 0.:
|
||||
codes = corine["distance_grid_codes"]
|
||||
buffer = corine["distance"]
|
||||
excluder.add_raster(paths.corine, codes=codes, buffer=buffer, crs=3035)
|
||||
excluder.add_raster(snakemake.input.corine, codes=codes, buffer=buffer, crs=3035)
|
||||
|
||||
if "max_depth" in config:
|
||||
func = lambda v: v <= -config['max_depth']
|
||||
excluder.add_raster(paths.gebco, codes=func, crs=4236, nodata=-1000)
|
||||
# lambda not supported for atlite + multiprocessing
|
||||
# use named function np.greater with partially frozen argument instead
|
||||
# and exclude areas where: -max_depth > grid cell depth
|
||||
func = functools.partial(np.greater,-config['max_depth'])
|
||||
excluder.add_raster(snakemake.input.gebco, codes=func, crs=4326, nodata=-1000)
|
||||
|
||||
if 'min_shore_distance' in config:
|
||||
buffer = config['min_shore_distance']
|
||||
excluder.add_geometry(paths.country_shapes, buffer=buffer)
|
||||
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)
|
||||
|
||||
if 'max_shore_distance' in config:
|
||||
buffer = config['max_shore_distance']
|
||||
excluder.add_geometry(paths.country_shapes, buffer=buffer, invert=True)
|
||||
excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True)
|
||||
|
||||
kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)
|
||||
if noprogress:
|
||||
@ -262,7 +271,7 @@ if __name__ == '__main__':
|
||||
|
||||
potential = capacity_per_sqkm * availability.sum('bus') * area
|
||||
func = getattr(cutout, resource.pop('method'))
|
||||
resource['dask_kwargs'] = {'num_workers': nprocesses}
|
||||
resource['dask_kwargs'] = {"scheduler": client}
|
||||
capacity_factor = correction_factor * func(capacity_factor=True, **resource)
|
||||
layout = capacity_factor * area * capacity_per_sqkm
|
||||
profile, capacities = func(matrix=availability.stack(spatial=['y','x']),
|
||||
@ -311,7 +320,7 @@ if __name__ == '__main__':
|
||||
|
||||
if snakemake.wildcards.technology.startswith("offwind"):
|
||||
logger.info('Calculate underwater fraction of connections.')
|
||||
offshore_shape = gpd.read_file(paths['offshore_shapes']).unary_union
|
||||
offshore_shape = gpd.read_file(snakemake.input['offshore_shapes']).unary_union
|
||||
underwater_fraction = []
|
||||
for bus in buses:
|
||||
p = centre_of_mass.sel(bus=bus).data
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Creates GIS shape files of the countries, exclusive economic zones and `NUTS3 <https://en.wikipedia.org/wiki/Nomenclature_of_Territorial_Units_for_Statistics>`_ areas.
|
||||
@ -34,8 +34,8 @@ Inputs
|
||||
.. image:: ../img/nuts3.png
|
||||
:scale: 33 %
|
||||
|
||||
- ``data/bundle/nama_10r_3popgdp.tsv.gz``: Average annual population by NUTS3 region (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3popgdp&lang=en>`_)
|
||||
- ``data/bundle/nama_10r_3gdp.tsv.gz``: Gross domestic product (GDP) by NUTS 3 regions (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3gdp&lang=en>`_)
|
||||
- ``data/bundle/nama_10r_3popgdp.tsv.gz``: Average annual population by NUTS3 region (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3popgdp&lang=en>`__)
|
||||
- ``data/bundle/nama_10r_3gdp.tsv.gz``: Gross domestic product (GDP) by NUTS 3 regions (`eurostat <http://appsso.eurostat.ec.europa.eu/nui/show.do?dataset=nama_10r_3gdp&lang=en>`__)
|
||||
- ``data/bundle/ch_cantons.csv``: Mapping between Swiss Cantons and NUTS3 regions
|
||||
- ``data/bundle/je-e-21.03.02.xls``: Population and GDP data per Canton (`BFS - Swiss Federal Statistical Office <https://www.bfs.admin.ch/bfs/en/home/news/whats-new.assetdetail.7786557.html>`_ )
|
||||
|
||||
@ -73,13 +73,13 @@ from _helpers import configure_logging
|
||||
import os
|
||||
import numpy as np
|
||||
from operator import attrgetter
|
||||
from six.moves import reduce
|
||||
from functools import reduce
|
||||
from itertools import takewhile
|
||||
|
||||
import pandas as pd
|
||||
import geopandas as gpd
|
||||
from shapely.geometry import MultiPolygon, Polygon
|
||||
from shapely.ops import cascaded_union
|
||||
from shapely.ops import unary_union
|
||||
import pycountry as pyc
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -95,7 +95,7 @@ def _get_country(target, **keys):
|
||||
|
||||
def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
|
||||
if isinstance(polys, MultiPolygon):
|
||||
polys = sorted(polys, key=attrgetter('area'), reverse=True)
|
||||
polys = sorted(polys.geoms, key=attrgetter('area'), reverse=True)
|
||||
mainpoly = polys[0]
|
||||
mainlength = np.sqrt(mainpoly.area/(2.*np.pi))
|
||||
if mainpoly.area > minarea:
|
||||
@ -107,26 +107,25 @@ def _simplify_polys(polys, minarea=0.1, tolerance=0.01, filterremote=True):
|
||||
return polys.simplify(tolerance=tolerance)
|
||||
|
||||
|
||||
def countries():
|
||||
cntries = snakemake.config['countries']
|
||||
if 'RS' in cntries: cntries.append('KV')
|
||||
def countries(naturalearth, country_list):
|
||||
if 'RS' in country_list: country_list.append('KV')
|
||||
|
||||
df = gpd.read_file(snakemake.input.naturalearth)
|
||||
df = gpd.read_file(naturalearth)
|
||||
|
||||
# Names are a hassle in naturalearth, try several fields
|
||||
fieldnames = (df[x].where(lambda s: s!='-99') for x in ('ISO_A2', 'WB_A2', 'ADM0_A3'))
|
||||
df['name'] = reduce(lambda x,y: x.fillna(y), fieldnames, next(fieldnames)).str[0:2]
|
||||
|
||||
df = df.loc[df.name.isin(cntries) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))]
|
||||
df = df.loc[df.name.isin(country_list) & ((df['scalerank'] == 0) | (df['scalerank'] == 5))]
|
||||
s = df.set_index('name')['geometry'].map(_simplify_polys)
|
||||
if 'RS' in cntries: s['RS'] = s['RS'].union(s.pop('KV'))
|
||||
if 'RS' in country_list: s['RS'] = s['RS'].union(s.pop('KV'))
|
||||
|
||||
return s
|
||||
|
||||
|
||||
def eez(country_shapes):
|
||||
df = gpd.read_file(snakemake.input.eez)
|
||||
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in snakemake.config['countries']])]
|
||||
def eez(country_shapes, eez, country_list):
|
||||
df = gpd.read_file(eez)
|
||||
df = df.loc[df['ISO_3digit'].isin([_get_country('alpha_3', alpha_2=c) for c in country_list])]
|
||||
df['name'] = df['ISO_3digit'].map(lambda c: _get_country('alpha_2', alpha_3=c))
|
||||
s = df.set_index('name').geometry.map(lambda s: _simplify_polys(s, filterremote=False))
|
||||
s = gpd.GeoSeries({k:v for k,v in s.iteritems() if v.distance(country_shapes[k]) < 1e-3})
|
||||
@ -139,39 +138,41 @@ def country_cover(country_shapes, eez_shapes=None):
|
||||
if eez_shapes is not None:
|
||||
shapes += list(eez_shapes)
|
||||
|
||||
europe_shape = cascaded_union(shapes)
|
||||
europe_shape = unary_union(shapes)
|
||||
if isinstance(europe_shape, MultiPolygon):
|
||||
europe_shape = max(europe_shape, key=attrgetter('area'))
|
||||
return Polygon(shell=europe_shape.exterior)
|
||||
|
||||
|
||||
def nuts3(country_shapes):
|
||||
df = gpd.read_file(snakemake.input.nuts3)
|
||||
def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
|
||||
df = gpd.read_file(nuts3)
|
||||
df = df.loc[df['STAT_LEVL_'] == 3]
|
||||
df['geometry'] = df['geometry'].map(_simplify_polys)
|
||||
df = df.rename(columns={'NUTS_ID': 'id'})[['id', 'geometry']].set_index('id')
|
||||
|
||||
pop = pd.read_table(snakemake.input.nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python')
|
||||
pop = pd.read_table(nuts3pop, na_values=[':'], delimiter=' ?\t', engine='python')
|
||||
pop = (pop
|
||||
.set_index(pd.MultiIndex.from_tuples(pop.pop('unit,geo\\time').str.split(','))).loc['THS']
|
||||
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
|
||||
.fillna(method='bfill', axis=1))['2014']
|
||||
|
||||
gdp = pd.read_table(snakemake.input.nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python')
|
||||
gdp = pd.read_table(nuts3gdp, na_values=[':'], delimiter=' ?\t', engine='python')
|
||||
gdp = (gdp
|
||||
.set_index(pd.MultiIndex.from_tuples(gdp.pop('unit,geo\\time').str.split(','))).loc['EUR_HAB']
|
||||
.applymap(lambda x: pd.to_numeric(x, errors='coerce'))
|
||||
.fillna(method='bfill', axis=1))['2014']
|
||||
|
||||
cantons = pd.read_csv(snakemake.input.ch_cantons)
|
||||
cantons = pd.read_csv(ch_cantons)
|
||||
cantons = cantons.set_index(cantons['HASC'].str[3:])['NUTS']
|
||||
cantons = cantons.str.pad(5, side='right', fillchar='0')
|
||||
|
||||
swiss = pd.read_excel(snakemake.input.ch_popgdp, skiprows=3, index_col=0)
|
||||
swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0)
|
||||
swiss.columns = swiss.columns.to_series().map(cantons)
|
||||
|
||||
pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':]))
|
||||
gdp = gdp.append(pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':]))
|
||||
swiss_pop = pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])
|
||||
pop = pd.concat([pop, swiss_pop])
|
||||
swiss_gdp = pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':])
|
||||
gdp = pd.concat([gdp, swiss_gdp])
|
||||
|
||||
df = df.join(pd.DataFrame(dict(pop=pop, gdp=gdp)))
|
||||
|
||||
@ -195,7 +196,7 @@ def nuts3(country_shapes):
|
||||
manual['geometry'] = manual['country'].map(country_shapes)
|
||||
manual = manual.dropna()
|
||||
|
||||
df = df.append(manual, sort=False)
|
||||
df = pd.concat([df, manual], sort=False)
|
||||
|
||||
df.loc['ME000', 'pop'] = 650.
|
||||
|
||||
@ -218,16 +219,16 @@ if __name__ == "__main__":
|
||||
snakemake = mock_snakemake('build_shapes')
|
||||
configure_logging(snakemake)
|
||||
|
||||
out = snakemake.output
|
||||
country_shapes = countries(snakemake.input.naturalearth, snakemake.config['countries'])
|
||||
save_to_geojson(country_shapes, snakemake.output.country_shapes)
|
||||
|
||||
country_shapes = countries()
|
||||
save_to_geojson(country_shapes, out.country_shapes)
|
||||
|
||||
offshore_shapes = eez(country_shapes)
|
||||
save_to_geojson(offshore_shapes, out.offshore_shapes)
|
||||
offshore_shapes = eez(country_shapes, snakemake.input.eez, snakemake.config['countries'])
|
||||
save_to_geojson(offshore_shapes, snakemake.output.offshore_shapes)
|
||||
|
||||
europe_shape = country_cover(country_shapes, offshore_shapes)
|
||||
save_to_geojson(gpd.GeoSeries(europe_shape), out.europe_shape)
|
||||
save_to_geojson(gpd.GeoSeries(europe_shape), snakemake.output.europe_shape)
|
||||
|
||||
nuts3_shapes = nuts3(country_shapes)
|
||||
save_to_geojson(nuts3_shapes, out.nuts3_shapes)
|
||||
nuts3_shapes = nuts3(country_shapes, snakemake.input.nuts3, snakemake.input.nuts3pop,
|
||||
snakemake.input.nuts3gdp, snakemake.input.ch_cantons, snakemake.input.ch_popgdp)
|
||||
|
||||
save_to_geojson(nuts3_shapes, snakemake.output.nuts3_shapes)
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
@ -122,7 +122,7 @@ Exemplary unsolved network clustered to 37 nodes:
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging
|
||||
from _helpers import configure_logging, update_p_nom_max
|
||||
|
||||
import pypsa
|
||||
import os
|
||||
@ -135,11 +135,14 @@ import pyomo.environ as po
|
||||
import matplotlib.pyplot as plt
|
||||
import seaborn as sns
|
||||
|
||||
from six.moves import reduce
|
||||
from functools import reduce
|
||||
|
||||
from pypsa.networkclustering import (busmap_by_kmeans, busmap_by_spectral_clustering,
|
||||
_make_consense, get_clustering_from_busmap)
|
||||
|
||||
import warnings
|
||||
warnings.filterwarnings(action='ignore', category=UserWarning)
|
||||
|
||||
from add_electricity import load_costs
|
||||
|
||||
idx = pd.IndexSlice
|
||||
@ -170,12 +173,9 @@ def weighting_for_country(n, x):
|
||||
return (w * (100. / w.max())).clip(lower=1.).astype(int)
|
||||
|
||||
|
||||
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
|
||||
def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="cbc"):
|
||||
"""Determine the number of clusters per country"""
|
||||
|
||||
if solver_name is None:
|
||||
solver_name = snakemake.config['solving']['solver']['name']
|
||||
|
||||
L = (n.loads_t.p_set.mean()
|
||||
.groupby(n.loads.bus).sum()
|
||||
.groupby([n.buses.country, n.buses.sub_network]).sum()
|
||||
@ -218,7 +218,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name=None):
|
||||
results = opt.solve(m)
|
||||
assert results['Solver'][0]['Status'] == 'ok', f"Solver returned non-optimally: {results}"
|
||||
|
||||
return pd.Series(m.n.get_values(), index=L.index).astype(int)
|
||||
return pd.Series(m.n.get_values(), index=L.index).round().astype(int)
|
||||
|
||||
|
||||
def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algorithm="kmeans", **algorithm_kwds):
|
||||
@ -226,6 +226,7 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
|
||||
algorithm_kwds.setdefault('n_init', 1000)
|
||||
algorithm_kwds.setdefault('max_iter', 30000)
|
||||
algorithm_kwds.setdefault('tol', 1e-6)
|
||||
algorithm_kwds.setdefault('random_state', 0)
|
||||
|
||||
n.determine_network_topology()
|
||||
|
||||
@ -262,18 +263,16 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
|
||||
algorithm="kmeans", extended_link_costs=0, focus_weights=None):
|
||||
|
||||
if potential_mode == 'simple':
|
||||
p_nom_max_strategy = np.sum
|
||||
p_nom_max_strategy = pd.Series.sum
|
||||
elif potential_mode == 'conservative':
|
||||
p_nom_max_strategy = np.min
|
||||
p_nom_max_strategy = pd.Series.min
|
||||
else:
|
||||
raise AttributeError(f"potential_mode should be one of 'simple' or 'conservative' but is '{potential_mode}'")
|
||||
|
||||
if custom_busmap:
|
||||
busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
|
||||
busmap.index = busmap.index.astype(str)
|
||||
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
|
||||
else:
|
||||
if not isinstance(custom_busmap, pd.Series):
|
||||
busmap = busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights, algorithm)
|
||||
else:
|
||||
busmap = custom_busmap
|
||||
|
||||
clustering = get_clustering_from_busmap(
|
||||
n, busmap,
|
||||
@ -282,7 +281,14 @@ def clustering_for_n_clusters(n, n_clusters, custom_busmap=False, aggregate_carr
|
||||
aggregate_generators_carriers=aggregate_carriers,
|
||||
aggregate_one_ports=["Load", "StorageUnit"],
|
||||
line_length_factor=line_length_factor,
|
||||
generator_strategies={'p_nom_max': p_nom_max_strategy},
|
||||
generator_strategies={'p_nom_max': p_nom_max_strategy,
|
||||
'p_nom_min': pd.Series.sum,
|
||||
'p_min_pu': pd.Series.mean,
|
||||
'marginal_cost': pd.Series.mean,
|
||||
'committable': np.any,
|
||||
'ramp_limit_up': pd.Series.max,
|
||||
'ramp_limit_down': pd.Series.max,
|
||||
},
|
||||
scale_link_capital_costs=False)
|
||||
|
||||
if not n.links.empty:
|
||||
@ -306,14 +312,12 @@ def save_to_geojson(s, fn):
|
||||
|
||||
|
||||
def cluster_regions(busmaps, input=None, output=None):
|
||||
if input is None: input = snakemake.input
|
||||
if output is None: output = snakemake.output
|
||||
|
||||
busmap = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
|
||||
|
||||
for which in ('regions_onshore', 'regions_offshore'):
|
||||
regions = gpd.read_file(getattr(input, which)).set_index('name')
|
||||
geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.cascaded_union)
|
||||
geom_c = regions.geometry.groupby(busmap).apply(shapely.ops.unary_union)
|
||||
regions_c = gpd.GeoDataFrame(dict(geometry=geom_c))
|
||||
regions_c.index.name = 'name'
|
||||
save_to_geojson(regions_c, getattr(output, which))
|
||||
@ -346,6 +350,9 @@ if __name__ == "__main__":
|
||||
if snakemake.wildcards.clusters.endswith('m'):
|
||||
n_clusters = int(snakemake.wildcards.clusters[:-1])
|
||||
aggregate_carriers = pd.Index(n.generators.carrier.unique()).difference(renewable_carriers)
|
||||
elif snakemake.wildcards.clusters == 'all':
|
||||
n_clusters = len(n.buses)
|
||||
aggregate_carriers = None # All
|
||||
else:
|
||||
n_clusters = int(snakemake.wildcards.clusters)
|
||||
aggregate_carriers = None # All
|
||||
@ -357,10 +364,9 @@ if __name__ == "__main__":
|
||||
clustering = pypsa.networkclustering.Clustering(n, busmap, linemap, linemap, pd.Series(dtype='O'))
|
||||
else:
|
||||
line_length_factor = snakemake.config['lines']['length_factor']
|
||||
hvac_overhead_cost = (load_costs(n.snapshot_weightings.sum()/8760,
|
||||
tech_costs=snakemake.input.tech_costs,
|
||||
config=snakemake.config['costs'],
|
||||
elec_config=snakemake.config['electricity'])
|
||||
Nyears = n.snapshot_weightings.objective.sum()/8760
|
||||
|
||||
hvac_overhead_cost = (load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
|
||||
.at['HVAC overhead', 'capital_cost'])
|
||||
|
||||
def consense(x):
|
||||
@ -372,15 +378,20 @@ if __name__ == "__main__":
|
||||
potential_mode = consense(pd.Series([snakemake.config['renewable'][tech]['potential']
|
||||
for tech in renewable_carriers]))
|
||||
custom_busmap = snakemake.config["enable"].get("custom_busmap", False)
|
||||
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
|
||||
line_length_factor=line_length_factor,
|
||||
potential_mode=potential_mode,
|
||||
solver_name=snakemake.config['solving']['solver']['name'],
|
||||
extended_link_costs=hvac_overhead_cost,
|
||||
focus_weights=focus_weights)
|
||||
if custom_busmap:
|
||||
custom_busmap = pd.read_csv(snakemake.input.custom_busmap, index_col=0, squeeze=True)
|
||||
custom_busmap.index = custom_busmap.index.astype(str)
|
||||
logger.info(f"Imported custom busmap from {snakemake.input.custom_busmap}")
|
||||
|
||||
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap, aggregate_carriers,
|
||||
line_length_factor, potential_mode,
|
||||
snakemake.config['solving']['solver']['name'],
|
||||
"kmeans", hvac_overhead_cost, focus_weights)
|
||||
|
||||
update_p_nom_max(n)
|
||||
|
||||
clustering.network.export_to_netcdf(snakemake.output.network)
|
||||
for attr in ('busmap', 'linemap'): #also available: linemap_positive, linemap_negative
|
||||
getattr(clustering, attr).to_csv(snakemake.output[attr])
|
||||
|
||||
cluster_regions((clustering.busmap,))
|
||||
cluster_regions((clustering.busmap,), snakemake.input, snakemake.output)
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Creates summaries of aggregated energy and costs as ``.csv`` files.
|
||||
@ -61,7 +61,6 @@ import os
|
||||
import pypsa
|
||||
import pandas as pd
|
||||
|
||||
from six import iteritems
|
||||
from add_electricity import load_costs, update_transmission_costs
|
||||
|
||||
idx = pd.IndexSlice
|
||||
@ -113,15 +112,15 @@ def calculate_costs(n, label, costs):
|
||||
costs.loc[idx[raw_index],label] = capital_costs_grouped.values
|
||||
|
||||
if c.name == "Link":
|
||||
p = c.pnl.p0.multiply(n.snapshot_weightings,axis=0).sum()
|
||||
p = c.pnl.p0.multiply(n.snapshot_weightings.generators,axis=0).sum()
|
||||
elif c.name == "Line":
|
||||
continue
|
||||
elif c.name == "StorageUnit":
|
||||
p_all = c.pnl.p.multiply(n.snapshot_weightings,axis=0)
|
||||
p_all = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0)
|
||||
p_all[p_all < 0.] = 0.
|
||||
p = p_all.sum()
|
||||
else:
|
||||
p = c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum()
|
||||
p = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0).sum()
|
||||
|
||||
marginal_costs = p*c.df.marginal_cost
|
||||
|
||||
@ -146,10 +145,12 @@ def calculate_energy(n, label, energy):
|
||||
|
||||
for c in n.iterate_components(n.one_port_components|n.branch_components):
|
||||
|
||||
if c.name in n.one_port_components:
|
||||
c_energies = c.pnl.p.multiply(n.snapshot_weightings,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum()
|
||||
if c.name in {'Generator', 'Load', 'ShuntImpedance'}:
|
||||
c_energies = c.pnl.p.multiply(n.snapshot_weightings.generators,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum()
|
||||
elif c.name in {'StorageUnit', 'Store'}:
|
||||
c_energies = c.pnl.p.multiply(n.snapshot_weightings.stores,axis=0).sum().multiply(c.df.sign).groupby(c.df.carrier).sum()
|
||||
else:
|
||||
c_energies = (-c.pnl.p1.multiply(n.snapshot_weightings,axis=0).sum() - c.pnl.p0.multiply(n.snapshot_weightings,axis=0).sum()).groupby(c.df.carrier).sum()
|
||||
c_energies = (-c.pnl.p1.multiply(n.snapshot_weightings.generators,axis=0).sum() - c.pnl.p0.multiply(n.snapshot_weightings.generators,axis=0).sum()).groupby(c.df.carrier).sum()
|
||||
|
||||
energy = include_in_summary(energy, [c.list_name], label, c_energies)
|
||||
|
||||
@ -171,6 +172,9 @@ def calculate_capacity(n,label,capacity):
|
||||
if 'p_nom_opt' in c.df.columns:
|
||||
c_capacities = abs(c.df.p_nom_opt.multiply(c.df.sign)).groupby(c.df.carrier).sum()
|
||||
capacity = include_in_summary(capacity, [c.list_name], label, c_capacities)
|
||||
elif 'e_nom_opt' in c.df.columns:
|
||||
c_capacities = abs(c.df.e_nom_opt.multiply(c.df.sign)).groupby(c.df.carrier).sum()
|
||||
capacity = include_in_summary(capacity, [c.list_name], label, c_capacities)
|
||||
|
||||
for c in n.iterate_components(n.passive_branch_components):
|
||||
c_capacities = c.df['s_nom_opt'].groupby(c.df.carrier).sum()
|
||||
@ -185,11 +189,11 @@ def calculate_capacity(n,label,capacity):
|
||||
def calculate_supply(n, label, supply):
|
||||
"""calculate the max dispatch of each component at the buses where the loads are attached"""
|
||||
|
||||
load_types = n.loads.carrier.value_counts().index
|
||||
load_types = n.buses.carrier.unique()
|
||||
|
||||
for i in load_types:
|
||||
|
||||
buses = n.loads.bus[n.loads.carrier == i].values
|
||||
buses = n.buses.query("carrier == @i").index
|
||||
|
||||
bus_map = pd.Series(False,index=n.buses.index)
|
||||
|
||||
@ -232,11 +236,11 @@ def calculate_supply(n, label, supply):
|
||||
def calculate_supply_energy(n, label, supply_energy):
|
||||
"""calculate the total dispatch of each component at the buses where the loads are attached"""
|
||||
|
||||
load_types = n.loads.carrier.value_counts().index
|
||||
load_types = n.buses.carrier.unique()
|
||||
|
||||
for i in load_types:
|
||||
|
||||
buses = n.loads.bus[n.loads.carrier == i].values
|
||||
buses = n.buses.query("carrier == @i").index
|
||||
|
||||
bus_map = pd.Series(False,index=n.buses.index)
|
||||
|
||||
@ -378,7 +382,7 @@ outputs = ["costs",
|
||||
]
|
||||
|
||||
|
||||
def make_summaries(networks_dict, country='all'):
|
||||
def make_summaries(networks_dict, paths, config, country='all'):
|
||||
|
||||
columns = pd.MultiIndex.from_tuples(networks_dict.keys(),names=["simpl","clusters","ll","opts"])
|
||||
|
||||
@ -387,7 +391,7 @@ def make_summaries(networks_dict, country='all'):
|
||||
for output in outputs:
|
||||
dfs[output] = pd.DataFrame(columns=columns,dtype=float)
|
||||
|
||||
for label, filename in iteritems(networks_dict):
|
||||
for label, filename in networks_dict.items():
|
||||
print(label, filename)
|
||||
if not os.path.exists(filename):
|
||||
print("does not exist!!")
|
||||
@ -402,10 +406,9 @@ def make_summaries(networks_dict, country='all'):
|
||||
if country != 'all':
|
||||
n = n[n.buses.country == country]
|
||||
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
costs = load_costs(Nyears, snakemake.input[0],
|
||||
snakemake.config['costs'], snakemake.config['electricity'])
|
||||
update_transmission_costs(n, costs, simple_hvdc_costs=False)
|
||||
Nyears = n.snapshot_weightings.objective.sum() / 8760.
|
||||
costs = load_costs(paths[0], config['costs'], config['electricity'], Nyears)
|
||||
update_transmission_costs(n, costs)
|
||||
|
||||
assign_carriers(n)
|
||||
|
||||
@ -415,10 +418,9 @@ def make_summaries(networks_dict, country='all'):
|
||||
return dfs
|
||||
|
||||
|
||||
def to_csv(dfs):
|
||||
dir = snakemake.output[0]
|
||||
def to_csv(dfs, dir):
|
||||
os.makedirs(dir, exist_ok=True)
|
||||
for key, df in iteritems(dfs):
|
||||
for key, df in dfs.items():
|
||||
df.to_csv(os.path.join(dir, f"{key}.csv"))
|
||||
|
||||
|
||||
@ -432,25 +434,28 @@ if __name__ == "__main__":
|
||||
network_dir = os.path.join('results', 'networks')
|
||||
configure_logging(snakemake)
|
||||
|
||||
def expand_from_wildcard(key):
|
||||
w = getattr(snakemake.wildcards, key)
|
||||
return snakemake.config["scenario"][key] if w == "all" else [w]
|
||||
config = snakemake.config
|
||||
wildcards = snakemake.wildcards
|
||||
|
||||
if snakemake.wildcards.ll.endswith("all"):
|
||||
ll = snakemake.config["scenario"]["ll"]
|
||||
if len(snakemake.wildcards.ll) == 4:
|
||||
ll = [l for l in ll if l[0] == snakemake.wildcards.ll[0]]
|
||||
def expand_from_wildcard(key, config):
|
||||
w = getattr(wildcards, key)
|
||||
return config["scenario"][key] if w == "all" else [w]
|
||||
|
||||
if wildcards.ll.endswith("all"):
|
||||
ll = config["scenario"]["ll"]
|
||||
if len(wildcards.ll) == 4:
|
||||
ll = [l for l in ll if l[0] == wildcards.ll[0]]
|
||||
else:
|
||||
ll = [snakemake.wildcards.ll]
|
||||
ll = [wildcards.ll]
|
||||
|
||||
networks_dict = {(simpl,clusters,l,opts) :
|
||||
os.path.join(network_dir, f'{snakemake.wildcards.network}_s{simpl}_'
|
||||
os.path.join(network_dir, f'elec_s{simpl}_'
|
||||
f'{clusters}_ec_l{l}_{opts}.nc')
|
||||
for simpl in expand_from_wildcard("simpl")
|
||||
for clusters in expand_from_wildcard("clusters")
|
||||
for simpl in expand_from_wildcard("simpl", config)
|
||||
for clusters in expand_from_wildcard("clusters", config)
|
||||
for l in ll
|
||||
for opts in expand_from_wildcard("opts")}
|
||||
for opts in expand_from_wildcard("opts", config)}
|
||||
|
||||
dfs = make_summaries(networks_dict, country=snakemake.wildcards.country)
|
||||
dfs = make_summaries(networks_dict, snakemake.input, config, country=wildcards.country)
|
||||
|
||||
to_csv(dfs)
|
||||
to_csv(dfs, snakemake.output[0])
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Plots map with pie charts and cost box bar charts.
|
||||
@ -20,12 +20,10 @@ Description
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs,
|
||||
configure_logging)
|
||||
from _helpers import (load_network_for_plots, aggregate_p, aggregate_costs, configure_logging)
|
||||
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from six.moves import zip
|
||||
|
||||
import cartopy.crs as ccrs
|
||||
import matplotlib.pyplot as plt
|
||||
@ -76,7 +74,7 @@ def set_plot_style():
|
||||
}])
|
||||
|
||||
|
||||
def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
def plot_map(n, opts, ax=None, attribute='p_nom'):
|
||||
if ax is None:
|
||||
ax = plt.gca()
|
||||
|
||||
@ -89,36 +87,43 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
# bus_sizes = n.generators_t.p.sum().loc[n.generators.carrier == "load"].groupby(n.generators.bus).sum()
|
||||
bus_sizes = pd.concat((n.generators.query('carrier != "load"').groupby(['bus', 'carrier']).p_nom_opt.sum(),
|
||||
n.storage_units.groupby(['bus', 'carrier']).p_nom_opt.sum()))
|
||||
line_widths_exp = dict(Line=n.lines.s_nom_opt, Link=n.links.p_nom_opt)
|
||||
line_widths_cur = dict(Line=n.lines.s_nom_min, Link=n.links.p_nom_min)
|
||||
line_widths_exp = n.lines.s_nom_opt
|
||||
line_widths_cur = n.lines.s_nom_min
|
||||
link_widths_exp = n.links.p_nom_opt
|
||||
link_widths_cur = n.links.p_nom_min
|
||||
else:
|
||||
raise 'plotting of {} has not been implemented yet'.format(attribute)
|
||||
|
||||
|
||||
line_colors_with_alpha = \
|
||||
dict(Line=(line_widths_cur['Line'] / n.lines.s_nom > 1e-3)
|
||||
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}),
|
||||
Link=(line_widths_cur['Link'] / n.links.p_nom > 1e-3)
|
||||
((line_widths_cur / n.lines.s_nom > 1e-3)
|
||||
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}))
|
||||
link_colors_with_alpha = \
|
||||
((link_widths_cur / n.links.p_nom > 1e-3)
|
||||
.map({True: line_colors['cur'], False: to_rgba(line_colors['cur'], 0.)}))
|
||||
|
||||
|
||||
## FORMAT
|
||||
linewidth_factor = opts['map'][attribute]['linewidth_factor']
|
||||
bus_size_factor = opts['map'][attribute]['bus_size_factor']
|
||||
|
||||
## PLOT
|
||||
n.plot(line_widths=pd.concat(line_widths_exp)/linewidth_factor,
|
||||
line_colors=dict(Line=line_colors['exp'], Link=line_colors['exp']),
|
||||
n.plot(line_widths=line_widths_exp/linewidth_factor,
|
||||
link_widths=link_widths_exp/linewidth_factor,
|
||||
line_colors=line_colors['exp'],
|
||||
link_colors=line_colors['exp'],
|
||||
bus_sizes=bus_sizes/bus_size_factor,
|
||||
bus_colors=tech_colors,
|
||||
boundaries=map_boundaries,
|
||||
geomap=True,
|
||||
color_geomap=True, geomap=True,
|
||||
ax=ax)
|
||||
n.plot(line_widths=pd.concat(line_widths_cur)/linewidth_factor,
|
||||
line_colors=pd.concat(line_colors_with_alpha),
|
||||
n.plot(line_widths=line_widths_cur/linewidth_factor,
|
||||
link_widths=link_widths_cur/linewidth_factor,
|
||||
line_colors=line_colors_with_alpha,
|
||||
link_colors=link_colors_with_alpha,
|
||||
bus_sizes=0,
|
||||
bus_colors=tech_colors,
|
||||
boundaries=map_boundaries,
|
||||
geomap=False,
|
||||
color_geomap=True, geomap=False,
|
||||
ax=ax)
|
||||
ax.set_aspect('equal')
|
||||
ax.axis('off')
|
||||
@ -139,7 +144,7 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
loc="upper left", bbox_to_anchor=(0.24, 1.01),
|
||||
frameon=False,
|
||||
labelspacing=0.8, handletextpad=1.5,
|
||||
title='Transmission Exist./Exp. ')
|
||||
title='Transmission Exp./Exist. ')
|
||||
ax.add_artist(l1_1)
|
||||
|
||||
handles = []
|
||||
@ -176,7 +181,7 @@ def plot_map(n, ax=None, attribute='p_nom', opts={}):
|
||||
return fig
|
||||
|
||||
|
||||
def plot_total_energy_pie(n, ax=None):
|
||||
def plot_total_energy_pie(n, opts, ax=None):
|
||||
if ax is None: ax = plt.gca()
|
||||
|
||||
ax.set_title('Energy per technology', fontdict=dict(fontsize="medium"))
|
||||
@ -194,10 +199,10 @@ def plot_total_energy_pie(n, ax=None):
|
||||
t1.remove()
|
||||
t2.remove()
|
||||
|
||||
def plot_total_cost_bar(n, ax=None):
|
||||
def plot_total_cost_bar(n, opts, ax=None):
|
||||
if ax is None: ax = plt.gca()
|
||||
|
||||
total_load = (n.snapshot_weightings * n.loads_t.p.sum(axis=1)).sum()
|
||||
total_load = (n.snapshot_weightings.generators * n.loads_t.p.sum(axis=1)).sum()
|
||||
tech_colors = opts['tech_colors']
|
||||
|
||||
def split_costs(n):
|
||||
@ -253,31 +258,32 @@ if __name__ == "__main__":
|
||||
|
||||
set_plot_style()
|
||||
|
||||
opts = snakemake.config['plotting']
|
||||
map_figsize = opts['map']['figsize']
|
||||
map_boundaries = opts['map']['boundaries']
|
||||
config, wildcards = snakemake.config, snakemake.wildcards
|
||||
|
||||
n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, snakemake.config)
|
||||
map_figsize = config["plotting"]['map']['figsize']
|
||||
map_boundaries = config["plotting"]['map']['boundaries']
|
||||
|
||||
scenario_opts = snakemake.wildcards.opts.split('-')
|
||||
n = load_network_for_plots(snakemake.input.network, snakemake.input.tech_costs, config)
|
||||
|
||||
scenario_opts = wildcards.opts.split('-')
|
||||
|
||||
fig, ax = plt.subplots(figsize=map_figsize, subplot_kw={"projection": ccrs.PlateCarree()})
|
||||
plot_map(n, ax, snakemake.wildcards.attr, opts)
|
||||
plot_map(n, config["plotting"], ax=ax, attribute=wildcards.attr)
|
||||
|
||||
fig.savefig(snakemake.output.only_map, dpi=150, bbox_inches='tight')
|
||||
|
||||
ax1 = fig.add_axes([-0.115, 0.625, 0.2, 0.2])
|
||||
plot_total_energy_pie(n, ax1)
|
||||
plot_total_energy_pie(n, config["plotting"], ax=ax1)
|
||||
|
||||
ax2 = fig.add_axes([-0.075, 0.1, 0.1, 0.45])
|
||||
plot_total_cost_bar(n, ax2)
|
||||
plot_total_cost_bar(n, config["plotting"], ax=ax2)
|
||||
|
||||
ll = snakemake.wildcards.ll
|
||||
ll = wildcards.ll
|
||||
ll_type = ll[0]
|
||||
ll_factor = ll[1:]
|
||||
lbl = dict(c='line cost', v='line volume')[ll_type]
|
||||
amnt = '{ll} x today\'s'.format(ll=ll_factor) if ll_factor != 'opt' else 'optimal'
|
||||
fig.suptitle('Expansion to {amount} {label} at {clusters} clusters'
|
||||
.format(amount=amnt, label=lbl, clusters=snakemake.wildcards.clusters))
|
||||
.format(amount=amnt, label=lbl, clusters=wildcards.clusters))
|
||||
|
||||
fig.savefig(snakemake.output.ext, transparent=True, bbox_inches='tight')
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Plots renewable installation potentials per capacity factor.
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Plots energy and cost summaries for solved networks.
|
||||
@ -55,7 +55,7 @@ def rename_techs(label):
|
||||
preferred_order = pd.Index(["transmission lines","hydroelectricity","hydro reservoir","run of river","pumped hydro storage","onshore wind","offshore wind ac", "offshore wind dc","solar PV","solar thermal","OCGT","hydrogen storage","battery storage"])
|
||||
|
||||
|
||||
def plot_costs(infn, fn=None):
|
||||
def plot_costs(infn, config, fn=None):
|
||||
|
||||
## For now ignore the simpl header
|
||||
cost_df = pd.read_csv(infn,index_col=list(range(3)),header=[1,2,3])
|
||||
@ -67,7 +67,7 @@ def plot_costs(infn, fn=None):
|
||||
|
||||
df = df.groupby(df.index.map(rename_techs)).sum()
|
||||
|
||||
to_drop = df.index[df.max(axis=1) < snakemake.config['plotting']['costs_threshold']]
|
||||
to_drop = df.index[df.max(axis=1) < config['plotting']['costs_threshold']]
|
||||
|
||||
print("dropping")
|
||||
|
||||
@ -84,7 +84,7 @@ def plot_costs(infn, fn=None):
|
||||
fig, ax = plt.subplots()
|
||||
fig.set_size_inches((12,8))
|
||||
|
||||
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index])
|
||||
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index])
|
||||
|
||||
|
||||
handles,labels = ax.get_legend_handles_labels()
|
||||
@ -92,7 +92,7 @@ def plot_costs(infn, fn=None):
|
||||
handles.reverse()
|
||||
labels.reverse()
|
||||
|
||||
ax.set_ylim([0,snakemake.config['plotting']['costs_max']])
|
||||
ax.set_ylim([0,config['plotting']['costs_max']])
|
||||
|
||||
ax.set_ylabel("System Cost [EUR billion per year]")
|
||||
|
||||
@ -109,7 +109,7 @@ def plot_costs(infn, fn=None):
|
||||
fig.savefig(fn, transparent=True)
|
||||
|
||||
|
||||
def plot_energy(infn, fn=None):
|
||||
def plot_energy(infn, config, fn=None):
|
||||
|
||||
energy_df = pd.read_csv(infn, index_col=list(range(2)),header=[1,2,3])
|
||||
|
||||
@ -120,7 +120,7 @@ def plot_energy(infn, fn=None):
|
||||
|
||||
df = df.groupby(df.index.map(rename_techs)).sum()
|
||||
|
||||
to_drop = df.index[df.abs().max(axis=1) < snakemake.config['plotting']['energy_threshold']]
|
||||
to_drop = df.index[df.abs().max(axis=1) < config['plotting']['energy_threshold']]
|
||||
|
||||
print("dropping")
|
||||
|
||||
@ -137,7 +137,7 @@ def plot_energy(infn, fn=None):
|
||||
fig, ax = plt.subplots()
|
||||
fig.set_size_inches((12,8))
|
||||
|
||||
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[snakemake.config['plotting']['tech_colors'][i] for i in new_index])
|
||||
df.loc[new_index,new_columns].T.plot(kind="bar",ax=ax,stacked=True,color=[config['plotting']['tech_colors'][i] for i in new_index])
|
||||
|
||||
|
||||
handles,labels = ax.get_legend_handles_labels()
|
||||
@ -145,7 +145,7 @@ def plot_energy(infn, fn=None):
|
||||
handles.reverse()
|
||||
labels.reverse()
|
||||
|
||||
ax.set_ylim([snakemake.config['plotting']['energy_min'],snakemake.config['plotting']['energy_max']])
|
||||
ax.set_ylim([config['plotting']['energy_min'], config['plotting']['energy_max']])
|
||||
|
||||
ax.set_ylabel("Energy [TWh/a]")
|
||||
|
||||
@ -170,10 +170,12 @@ if __name__ == "__main__":
|
||||
attr='', ext='png', country='all')
|
||||
configure_logging(snakemake)
|
||||
|
||||
config = snakemake.config
|
||||
|
||||
summary = snakemake.wildcards.summary
|
||||
try:
|
||||
func = globals()[f"plot_{summary}"]
|
||||
except KeyError:
|
||||
raise RuntimeError(f"plotting function for {summary} has not been defined")
|
||||
|
||||
func(os.path.join(snakemake.input[0], f"{summary}.csv"), snakemake.output[0])
|
||||
func(os.path.join(snakemake.input[0], f"{summary}.csv"), config, snakemake.output[0])
|
||||
|
@ -2,7 +2,7 @@
|
||||
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Extracts capacities of HVDC links from `Wikipedia <https://en.wikipedia.org/wiki/List_of_HVDC_projects>`_.
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
@ -63,7 +63,6 @@ import re
|
||||
import pypsa
|
||||
import numpy as np
|
||||
import pandas as pd
|
||||
from six import iteritems
|
||||
|
||||
from add_electricity import load_costs, update_transmission_costs
|
||||
|
||||
@ -72,21 +71,14 @@ idx = pd.IndexSlice
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def add_co2limit(n, Nyears=1., factor=None):
|
||||
|
||||
if factor is not None:
|
||||
annual_emissions = factor*snakemake.config['electricity']['co2base']
|
||||
else:
|
||||
annual_emissions = snakemake.config['electricity']['co2limit']
|
||||
def add_co2limit(n, co2limit, Nyears=1.):
|
||||
|
||||
n.add("GlobalConstraint", "CO2Limit",
|
||||
carrier_attribute="co2_emissions", sense="<=",
|
||||
constant=annual_emissions * Nyears)
|
||||
constant=co2limit * Nyears)
|
||||
|
||||
|
||||
def add_emission_prices(n, emission_prices=None, exclude_co2=False):
|
||||
if emission_prices is None:
|
||||
emission_prices = snakemake.config['costs']['emission_prices']
|
||||
def add_emission_prices(n, emission_prices={'co2': 0.}, exclude_co2=False):
|
||||
if exclude_co2: emission_prices.pop('co2')
|
||||
ep = (pd.Series(emission_prices).rename(lambda x: x+'_emissions') *
|
||||
n.carriers.filter(like='_emissions')).sum(axis=1)
|
||||
@ -96,13 +88,12 @@ def add_emission_prices(n, emission_prices=None, exclude_co2=False):
|
||||
n.storage_units['marginal_cost'] += su_ep
|
||||
|
||||
|
||||
def set_line_s_max_pu(n):
|
||||
s_max_pu = snakemake.config['lines']['s_max_pu']
|
||||
def set_line_s_max_pu(n, s_max_pu = 0.7):
|
||||
n.lines['s_max_pu'] = s_max_pu
|
||||
logger.info(f"N-1 security margin of lines set to {s_max_pu}")
|
||||
|
||||
|
||||
def set_transmission_limit(n, ll_type, factor, Nyears=1):
|
||||
def set_transmission_limit(n, ll_type, factor, costs, Nyears=1):
|
||||
links_dc_b = n.links.carrier == 'DC' if not n.links.empty else pd.Series()
|
||||
|
||||
_lines_s_nom = (np.sqrt(3) * n.lines.type.map(n.line_types.i_nom) *
|
||||
@ -114,10 +105,7 @@ def set_transmission_limit(n, ll_type, factor, Nyears=1):
|
||||
ref = (lines_s_nom @ n.lines[col] +
|
||||
n.links.loc[links_dc_b, "p_nom"] @ n.links.loc[links_dc_b, col])
|
||||
|
||||
costs = load_costs(Nyears, snakemake.input.tech_costs,
|
||||
snakemake.config['costs'],
|
||||
snakemake.config['electricity'])
|
||||
update_transmission_costs(n, costs, simple_hvdc_costs=False)
|
||||
update_transmission_costs(n, costs)
|
||||
|
||||
if factor == 'opt' or float(factor) > 1.0:
|
||||
n.lines['s_nom_min'] = lines_s_nom
|
||||
@ -146,13 +134,14 @@ def average_every_nhours(n, offset):
|
||||
|
||||
for c in n.iterate_components():
|
||||
pnl = getattr(m, c.list_name+"_t")
|
||||
for k, df in iteritems(c.pnl):
|
||||
for k, df in c.pnl.items():
|
||||
if not df.empty:
|
||||
pnl[k] = df.resample(offset).mean()
|
||||
|
||||
return m
|
||||
|
||||
def apply_time_segmentation(n, segments):
|
||||
|
||||
def apply_time_segmentation(n, segments, solver_name="cbc"):
|
||||
logger.info(f"Aggregating time series to {segments} segments.")
|
||||
try:
|
||||
import tsam.timeseriesaggregation as tsam
|
||||
@ -171,8 +160,6 @@ def apply_time_segmentation(n, segments):
|
||||
|
||||
raw = pd.concat([p_max_pu, load, inflow], axis=1, sort=False)
|
||||
|
||||
solver_name = snakemake.config["solving"]["solver"]["name"]
|
||||
|
||||
agg = tsam.TimeSeriesAggregation(raw, hoursPerPeriod=len(raw),
|
||||
noTypicalPeriods=1, noSegments=int(segments),
|
||||
segmentation=True, solver=solver_name)
|
||||
@ -209,9 +196,7 @@ def enforce_autarky(n, only_crossborder=False):
|
||||
n.mremove("Line", lines_rm)
|
||||
n.mremove("Link", links_rm)
|
||||
|
||||
def set_line_nom_max(n):
|
||||
s_nom_max_set = snakemake.config["lines"].get("s_nom_max,", np.inf)
|
||||
p_nom_max_set = snakemake.config["links"].get("p_nom_max", np.inf)
|
||||
def set_line_nom_max(n, s_nom_max_set=np.inf, p_nom_max_set=np.inf):
|
||||
n.lines.s_nom_max.clip(upper=s_nom_max_set, inplace=True)
|
||||
n.links.p_nom_max.clip(upper=p_nom_max_set, inplace=True)
|
||||
|
||||
@ -225,9 +210,10 @@ if __name__ == "__main__":
|
||||
opts = snakemake.wildcards.opts.split('-')
|
||||
|
||||
n = pypsa.Network(snakemake.input[0])
|
||||
Nyears = n.snapshot_weightings.sum() / 8760.
|
||||
Nyears = n.snapshot_weightings.objective.sum() / 8760.
|
||||
costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
|
||||
|
||||
set_line_s_max_pu(n)
|
||||
set_line_s_max_pu(n, snakemake.config['lines']['s_max_pu'])
|
||||
|
||||
for o in opts:
|
||||
m = re.match(r'^\d+h$', o, re.IGNORECASE)
|
||||
@ -238,16 +224,18 @@ if __name__ == "__main__":
|
||||
for o in opts:
|
||||
m = re.match(r'^\d+seg$', o, re.IGNORECASE)
|
||||
if m is not None:
|
||||
n = apply_time_segmentation(n, m.group(0)[:-3])
|
||||
solver_name = snakemake.config["solving"]["solver"]["name"]
|
||||
n = apply_time_segmentation(n, m.group(0)[:-3], solver_name)
|
||||
break
|
||||
|
||||
for o in opts:
|
||||
if "Co2L" in o:
|
||||
m = re.findall("[0-9]*\.?[0-9]+$", o)
|
||||
if len(m) > 0:
|
||||
add_co2limit(n, Nyears, float(m[0]))
|
||||
co2limit = float(m[0]) * snakemake.config['electricity']['co2base']
|
||||
add_co2limit(n, co2limit, Nyears)
|
||||
else:
|
||||
add_co2limit(n, Nyears)
|
||||
add_co2limit(n, snakemake.config['electricity']['co2limit'], Nyears)
|
||||
break
|
||||
|
||||
for o in opts:
|
||||
@ -268,12 +256,13 @@ if __name__ == "__main__":
|
||||
c.df.loc[sel,attr] *= factor
|
||||
|
||||
if 'Ep' in opts:
|
||||
add_emission_prices(n)
|
||||
add_emission_prices(n, snakemake.config['costs']['emission_prices'])
|
||||
|
||||
ll_type, factor = snakemake.wildcards.ll[0], snakemake.wildcards.ll[1:]
|
||||
set_transmission_limit(n, ll_type, factor, Nyears)
|
||||
set_transmission_limit(n, ll_type, factor, costs, Nyears)
|
||||
|
||||
set_line_nom_max(n)
|
||||
set_line_nom_max(n, s_nom_max_set=snakemake.config["lines"].get("s_nom_max,", np.inf),
|
||||
p_nom_max_set=snakemake.config["links"].get("p_nom_max,", np.inf))
|
||||
|
||||
if "ATK" in opts:
|
||||
enforce_autarky(n)
|
||||
|
@ -1,7 +1,7 @@
|
||||
# Copyright 2019-2020 Fabian Hofmann (FIAS)
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.3517935.svg
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
# coding: utf-8
|
||||
"""
|
||||
@ -84,7 +84,7 @@ The rule :mod:`simplify_network` does up to four things:
|
||||
"""
|
||||
|
||||
import logging
|
||||
from _helpers import configure_logging
|
||||
from _helpers import configure_logging, update_p_nom_max
|
||||
|
||||
from cluster_network import clustering_for_n_clusters, cluster_regions
|
||||
from add_electricity import load_costs
|
||||
@ -94,12 +94,11 @@ import numpy as np
|
||||
import scipy as sp
|
||||
from scipy.sparse.csgraph import connected_components, dijkstra
|
||||
|
||||
from six import iteritems
|
||||
from six.moves import reduce
|
||||
from functools import reduce
|
||||
|
||||
import pypsa
|
||||
from pypsa.io import import_components_from_dataframe, import_series_from_dataframe
|
||||
from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport
|
||||
from pypsa.networkclustering import busmap_by_stubs, aggregategenerators, aggregateoneport, get_clustering_from_busmap, _make_consense
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@ -126,7 +125,8 @@ def simplify_network_to_380(n):
|
||||
several_trafo_b = trafo_map.isin(trafo_map.index)
|
||||
trafo_map.loc[several_trafo_b] = trafo_map.loc[several_trafo_b].map(trafo_map)
|
||||
missing_buses_i = n.buses.index.difference(trafo_map.index)
|
||||
trafo_map = trafo_map.append(pd.Series(missing_buses_i, missing_buses_i))
|
||||
missing = pd.Series(missing_buses_i, missing_buses_i)
|
||||
trafo_map = pd.concat([trafo_map, missing])
|
||||
|
||||
for c in n.one_port_components|n.branch_components:
|
||||
df = n.df(c)
|
||||
@ -140,18 +140,15 @@ def simplify_network_to_380(n):
|
||||
return n, trafo_map
|
||||
|
||||
|
||||
def _prepare_connection_costs_per_link(n):
|
||||
def _prepare_connection_costs_per_link(n, costs, config):
|
||||
if n.links.empty: return {}
|
||||
|
||||
costs = load_costs(n.snapshot_weightings.sum() / 8760, snakemake.input.tech_costs,
|
||||
snakemake.config['costs'], snakemake.config['electricity'])
|
||||
|
||||
connection_costs_per_link = {}
|
||||
|
||||
for tech in snakemake.config['renewable']:
|
||||
for tech in config['renewable']:
|
||||
if tech.startswith('offwind'):
|
||||
connection_costs_per_link[tech] = (
|
||||
n.links.length * snakemake.config['lines']['length_factor'] *
|
||||
n.links.length * config['lines']['length_factor'] *
|
||||
(n.links.underwater_fraction * costs.at[tech + '-connection-submarine', 'capital_cost'] +
|
||||
(1. - n.links.underwater_fraction) * costs.at[tech + '-connection-underground', 'capital_cost'])
|
||||
)
|
||||
@ -159,9 +156,9 @@ def _prepare_connection_costs_per_link(n):
|
||||
return connection_costs_per_link
|
||||
|
||||
|
||||
def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None, buses=None):
|
||||
def _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link=None, buses=None):
|
||||
if connection_costs_per_link is None:
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n)
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
|
||||
|
||||
if buses is None:
|
||||
buses = busmap.index[busmap.index != busmap.values]
|
||||
@ -179,7 +176,8 @@ def _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link=None,
|
||||
return connection_costs_to_bus
|
||||
|
||||
|
||||
def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
|
||||
def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output):
|
||||
connection_costs = {}
|
||||
for tech in connection_costs_to_bus:
|
||||
tech_b = n.generators.carrier == tech
|
||||
costs = n.generators.loc[tech_b, "bus"].map(connection_costs_to_bus[tech]).loc[lambda s: s>0]
|
||||
@ -187,20 +185,23 @@ def _adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus):
|
||||
n.generators.loc[costs.index, "capital_cost"] += costs
|
||||
logger.info("Displacing {} generator(s) and adding connection costs to capital_costs: {} "
|
||||
.format(tech, ", ".join("{:.0f} Eur/MW/a for `{}`".format(d, b) for b, d in costs.iteritems())))
|
||||
connection_costs[tech] = costs
|
||||
pd.DataFrame(connection_costs).to_csv(output.connection_costs)
|
||||
|
||||
|
||||
|
||||
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate_one_ports={"Load", "StorageUnit"}):
|
||||
def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, output, aggregate_one_ports={"Load", "StorageUnit"}):
|
||||
def replace_components(n, c, df, pnl):
|
||||
n.mremove(c, n.df(c).index)
|
||||
|
||||
import_components_from_dataframe(n, df, c)
|
||||
for attr, df in iteritems(pnl):
|
||||
for attr, df in pnl.items():
|
||||
if not df.empty:
|
||||
import_series_from_dataframe(n, df, c, attr)
|
||||
|
||||
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus)
|
||||
_adjust_capital_costs_using_connection_costs(n, connection_costs_to_bus, output)
|
||||
|
||||
generators, generators_pnl = aggregategenerators(n, busmap)
|
||||
generators, generators_pnl = aggregategenerators(n, busmap, custom_strategies={'p_nom_min': np.sum})
|
||||
replace_components(n, "Generator", generators, generators_pnl)
|
||||
|
||||
for one_port in aggregate_one_ports:
|
||||
@ -214,7 +215,7 @@ def _aggregate_and_move_components(n, busmap, connection_costs_to_bus, aggregate
|
||||
n.mremove(c, df.index[df.bus0.isin(buses_to_del) | df.bus1.isin(buses_to_del)])
|
||||
|
||||
|
||||
def simplify_links(n):
|
||||
def simplify_links(n, costs, config, output):
|
||||
## Complex multi-node links are folded into end-points
|
||||
logger.info("Simplifying connected link components")
|
||||
|
||||
@ -238,7 +239,7 @@ def simplify_links(n):
|
||||
if len(G.adj[m]) > 2 or (set(G.adj[m]) - nodes)}
|
||||
|
||||
for u in supernodes:
|
||||
for m, ls in iteritems(G.adj[u]):
|
||||
for m, ls in G.adj[u].items():
|
||||
if m not in nodes or m in seen: continue
|
||||
|
||||
buses = [u, m]
|
||||
@ -246,7 +247,7 @@ def simplify_links(n):
|
||||
|
||||
while m not in (supernodes | seen):
|
||||
seen.add(m)
|
||||
for m2, ls in iteritems(G.adj[m]):
|
||||
for m2, ls in G.adj[m].items():
|
||||
if m2 in seen or m2 == u: continue
|
||||
buses.append(m2)
|
||||
links.append(list(ls)) # [name for name in ls])
|
||||
@ -261,7 +262,7 @@ def simplify_links(n):
|
||||
|
||||
busmap = n.buses.index.to_series()
|
||||
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n)
|
||||
connection_costs_per_link = _prepare_connection_costs_per_link(n, costs, config)
|
||||
connection_costs_to_bus = pd.DataFrame(0., index=n.buses.index, columns=list(connection_costs_per_link))
|
||||
|
||||
for lbl in labels.value_counts().loc[lambda s: s > 2].index:
|
||||
@ -275,11 +276,11 @@ def simplify_links(n):
|
||||
m = sp.spatial.distance_matrix(n.buses.loc[b, ['x', 'y']],
|
||||
n.buses.loc[buses[1:-1], ['x', 'y']])
|
||||
busmap.loc[buses] = b[np.r_[0, m.argmin(axis=0), 1]]
|
||||
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, connection_costs_per_link, buses)
|
||||
connection_costs_to_bus.loc[buses] += _compute_connection_costs_to_bus(n, busmap, costs, config, connection_costs_per_link, buses)
|
||||
|
||||
all_links = [i for _, i in sum(links, [])]
|
||||
|
||||
p_max_pu = snakemake.config['links'].get('p_max_pu', 1.)
|
||||
p_max_pu = config['links'].get('p_max_pu', 1.)
|
||||
lengths = n.links.loc[all_links, 'length']
|
||||
name = lengths.idxmax() + '+{}'.format(len(links) - 1)
|
||||
params = dict(
|
||||
@ -306,39 +307,77 @@ def simplify_links(n):
|
||||
|
||||
logger.debug("Collecting all components using the busmap")
|
||||
|
||||
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
|
||||
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
|
||||
return n, busmap
|
||||
|
||||
|
||||
def remove_stubs(n):
|
||||
def remove_stubs(n, costs, config, output):
|
||||
logger.info("Removing stubs")
|
||||
|
||||
busmap = busmap_by_stubs(n) # ['country'])
|
||||
|
||||
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap)
|
||||
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)
|
||||
|
||||
_aggregate_and_move_components(n, busmap, connection_costs_to_bus)
|
||||
_aggregate_and_move_components(n, busmap, connection_costs_to_bus, output)
|
||||
|
||||
return n, busmap
|
||||
|
||||
def aggregate_to_substations(n, buses_i=None):
|
||||
# can be used to aggregate a selection of buses to electrically closest neighbors
|
||||
# if no buses are given, nodes that are no substations or without offshore connection are aggregated
|
||||
|
||||
if buses_i is None:
|
||||
logger.info("Aggregating buses that are no substations or have no valid offshore connection")
|
||||
buses_i = list(set(n.buses.index)-set(n.generators.bus)-set(n.loads.bus))
|
||||
|
||||
def cluster(n, n_clusters):
|
||||
weight = pd.concat({'Line': n.lines.length/n.lines.s_nom.clip(1e-3),
|
||||
'Link': n.links.length/n.links.p_nom.clip(1e-3)})
|
||||
|
||||
adj = n.adjacency_matrix(branch_components=['Line', 'Link'], weights=weight)
|
||||
|
||||
bus_indexer = n.buses.index.get_indexer(buses_i)
|
||||
dist = pd.DataFrame(dijkstra(adj, directed=False, indices=bus_indexer), buses_i, n.buses.index)
|
||||
|
||||
dist[buses_i] = np.inf # bus in buses_i should not be assigned to different bus in buses_i
|
||||
|
||||
for c in n.buses.country.unique():
|
||||
incountry_b = n.buses.country == c
|
||||
dist.loc[incountry_b, ~incountry_b] = np.inf
|
||||
|
||||
busmap = n.buses.index.to_series()
|
||||
busmap.loc[buses_i] = dist.idxmin(1)
|
||||
|
||||
clustering = get_clustering_from_busmap(n, busmap,
|
||||
bus_strategies=dict(country=_make_consense("Bus", "country")),
|
||||
aggregate_generators_weighted=True,
|
||||
aggregate_generators_carriers=None,
|
||||
aggregate_one_ports=["Load", "StorageUnit"],
|
||||
line_length_factor=1.0,
|
||||
generator_strategies={'p_nom_max': 'sum'},
|
||||
scale_link_capital_costs=False)
|
||||
|
||||
return clustering.network, busmap
|
||||
|
||||
|
||||
def cluster(n, n_clusters, config):
|
||||
logger.info(f"Clustering to {n_clusters} buses")
|
||||
|
||||
focus_weights = config.get('focus_weights', None)
|
||||
|
||||
renewable_carriers = pd.Index([tech
|
||||
for tech in n.generators.carrier.unique()
|
||||
if tech.split('-', 2)[0] in snakemake.config['renewable']])
|
||||
if tech.split('-', 2)[0] in config['renewable']])
|
||||
def consense(x):
|
||||
v = x.iat[0]
|
||||
assert ((x == v).all() or x.isnull().all()), (
|
||||
"The `potential` configuration option must agree for all renewable carriers, for now!"
|
||||
)
|
||||
return v
|
||||
potential_mode = (consense(pd.Series([snakemake.config['renewable'][tech]['potential']
|
||||
potential_mode = (consense(pd.Series([config['renewable'][tech]['potential']
|
||||
for tech in renewable_carriers]))
|
||||
if len(renewable_carriers) > 0 else 'conservative')
|
||||
clustering = clustering_for_n_clusters(n, n_clusters, custom_busmap=False, potential_mode=potential_mode,
|
||||
solver_name=snakemake.config['solving']['solver']['name'])
|
||||
solver_name=config['solving']['solver']['name'],
|
||||
focus_weights=focus_weights)
|
||||
|
||||
return clustering.network, clustering.busmap
|
||||
|
||||
@ -353,16 +392,31 @@ if __name__ == "__main__":
|
||||
|
||||
n, trafo_map = simplify_network_to_380(n)
|
||||
|
||||
n, simplify_links_map = simplify_links(n)
|
||||
Nyears = n.snapshot_weightings.objective.sum() / 8760
|
||||
|
||||
n, stub_map = remove_stubs(n)
|
||||
technology_costs = load_costs(snakemake.input.tech_costs, snakemake.config['costs'], snakemake.config['electricity'], Nyears)
|
||||
|
||||
n, simplify_links_map = simplify_links(n, technology_costs, snakemake.config, snakemake.output)
|
||||
|
||||
n, stub_map = remove_stubs(n, technology_costs, snakemake.config, snakemake.output)
|
||||
|
||||
busmaps = [trafo_map, simplify_links_map, stub_map]
|
||||
|
||||
if snakemake.config.get('clustering', {}).get('simplify', {}).get('to_substations', False):
|
||||
n, substation_map = aggregate_to_substations(n)
|
||||
busmaps.append(substation_map)
|
||||
|
||||
if snakemake.wildcards.simpl:
|
||||
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl))
|
||||
n, cluster_map = cluster(n, int(snakemake.wildcards.simpl), snakemake.config)
|
||||
busmaps.append(cluster_map)
|
||||
|
||||
# some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed
|
||||
# and are lost when clustering (for example with the simpl wildcard), we remove them for consistency:
|
||||
buses_c = {'symbol', 'tags', 'under_construction', 'substation_lv', 'substation_off'}.intersection(n.buses.columns)
|
||||
n.buses = n.buses.drop(buses_c, axis=1)
|
||||
|
||||
update_p_nom_max(n)
|
||||
|
||||
n.export_to_netcdf(snakemake.output.network)
|
||||
|
||||
busmap_s = reduce(lambda x, y: x.map(y), busmaps[1:], busmaps[0])
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Solves linear optimal power flow for a network iteratively while updating reactances.
|
||||
@ -100,9 +100,10 @@ def prepare_network(n, solve_opts):
|
||||
df.where(df>solve_opts['clip_p_max_pu'], other=0., inplace=True)
|
||||
|
||||
if solve_opts.get('load_shedding'):
|
||||
n.add("Carrier", "Load")
|
||||
n.madd("Generator", n.buses.index, " load",
|
||||
bus=n.buses.index,
|
||||
n.add("Carrier", "load", color="#dd2e23", nice_name="Load shedding")
|
||||
buses_i = n.buses.query("carrier == 'AC'").index
|
||||
n.madd("Generator", buses_i, " load",
|
||||
bus=buses_i,
|
||||
carrier='load',
|
||||
sign=1e-3, # Adjust sign to measure p and p_nom in kW instead of MW
|
||||
marginal_cost=1e2, # Eur/kWh
|
||||
@ -127,7 +128,7 @@ def prepare_network(n, solve_opts):
|
||||
if solve_opts.get('nhours'):
|
||||
nhours = solve_opts['nhours']
|
||||
n.set_snapshots(n.snapshots[:nhours])
|
||||
n.snapshot_weightings[:] = 8760./nhours
|
||||
n.snapshot_weightings[:] = 8760. / nhours
|
||||
|
||||
return n
|
||||
|
||||
@ -174,16 +175,16 @@ def add_EQ_constraints(n, o, scaling=1e-1):
|
||||
ggrouper = n.generators.bus
|
||||
lgrouper = n.loads.bus
|
||||
sgrouper = n.storage_units.bus
|
||||
load = n.snapshot_weightings @ \
|
||||
load = n.snapshot_weightings.generators @ \
|
||||
n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
|
||||
inflow = n.snapshot_weightings @ \
|
||||
inflow = n.snapshot_weightings.stores @ \
|
||||
n.storage_units_t.inflow.groupby(sgrouper, axis=1).sum()
|
||||
inflow = inflow.reindex(load.index).fillna(0.)
|
||||
rhs = scaling * ( level * load - inflow )
|
||||
lhs_gen = linexpr((n.snapshot_weightings * scaling,
|
||||
lhs_gen = linexpr((n.snapshot_weightings.generators * scaling,
|
||||
get_var(n, "Generator", "p").T)
|
||||
).T.groupby(ggrouper, axis=1).apply(join_exprs)
|
||||
lhs_spill = linexpr((-n.snapshot_weightings * scaling,
|
||||
lhs_spill = linexpr((-n.snapshot_weightings.stores * scaling,
|
||||
get_var(n, "StorageUnit", "spill").T)
|
||||
).T.groupby(sgrouper, axis=1).apply(join_exprs)
|
||||
lhs_spill = lhs_spill.reindex(lhs_gen.index).fillna("")
|
||||
@ -241,7 +242,7 @@ def extra_functionality(n, snapshots):
|
||||
add_battery_constraints(n)
|
||||
|
||||
|
||||
def solve_network(n, config, solver_log=None, opts='', **kwargs):
|
||||
def solve_network(n, config, opts='', **kwargs):
|
||||
solver_options = config['solving']['solver'].copy()
|
||||
solver_name = solver_options.pop('name')
|
||||
cf_solving = config['solving']['options']
|
||||
@ -253,7 +254,12 @@ def solve_network(n, config, solver_log=None, opts='', **kwargs):
|
||||
n.config = config
|
||||
n.opts = opts
|
||||
|
||||
if cf_solving.get('skip_iterations', False):
|
||||
skip_iterations = cf_solving.get('skip_iterations', False)
|
||||
if not n.lines.s_nom_extendable.any():
|
||||
skip_iterations = True
|
||||
logger.info("No expandable lines found. Skipping iterative solving.")
|
||||
|
||||
if skip_iterations:
|
||||
network_lopf(n, solver_name=solver_name, solver_options=solver_options,
|
||||
extra_functionality=extra_functionality, **kwargs)
|
||||
else:
|
||||
@ -282,8 +288,8 @@ if __name__ == "__main__":
|
||||
with memory_logger(filename=fn, interval=30.) as mem:
|
||||
n = pypsa.Network(snakemake.input[0])
|
||||
n = prepare_network(n, solve_opts)
|
||||
n = solve_network(n, config=snakemake.config, solver_dir=tmpdir,
|
||||
solver_log=snakemake.log.solver, opts=opts)
|
||||
n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
|
||||
solver_logfile=snakemake.log.solver)
|
||||
n.export_to_netcdf(snakemake.output[0])
|
||||
|
||||
logger.info("Maximum memory usage: {}".format(mem.mem_usage))
|
||||
|
@ -1,6 +1,6 @@
|
||||
# SPDX-FileCopyrightText: : 2017-2020 The PyPSA-Eur Authors
|
||||
#
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
# SPDX-License-Identifier: MIT
|
||||
|
||||
"""
|
||||
Solves linear optimal dispatch in hourly resolution
|
||||
@ -71,7 +71,7 @@ def set_parameters_from_optimized(n, n_optim):
|
||||
n_optim.lines[attr].reindex(lines_untyped_i, fill_value=0.)
|
||||
n.lines['s_nom_extendable'] = False
|
||||
|
||||
links_dc_i = n.links.index[n.links.carrier == 'DC']
|
||||
links_dc_i = n.links.index[n.links.p_nom_extendable]
|
||||
n.links.loc[links_dc_i, 'p_nom'] = \
|
||||
n_optim.links['p_nom_opt'].reindex(links_dc_i, fill_value=0.)
|
||||
n.links.loc[links_dc_i, 'p_nom_extendable'] = False
|
||||
@ -81,10 +81,15 @@ def set_parameters_from_optimized(n, n_optim):
|
||||
n_optim.generators['p_nom_opt'].reindex(gen_extend_i, fill_value=0.)
|
||||
n.generators.loc[gen_extend_i, 'p_nom_extendable'] = False
|
||||
|
||||
stor_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
|
||||
n.storage_units.loc[stor_extend_i, 'p_nom'] = \
|
||||
n_optim.storage_units['p_nom_opt'].reindex(stor_extend_i, fill_value=0.)
|
||||
n.storage_units.loc[stor_extend_i, 'p_nom_extendable'] = False
|
||||
stor_units_extend_i = n.storage_units.index[n.storage_units.p_nom_extendable]
|
||||
n.storage_units.loc[stor_units_extend_i, 'p_nom'] = \
|
||||
n_optim.storage_units['p_nom_opt'].reindex(stor_units_extend_i, fill_value=0.)
|
||||
n.storage_units.loc[stor_units_extend_i, 'p_nom_extendable'] = False
|
||||
|
||||
stor_extend_i = n.stores.index[n.stores.e_nom_extendable]
|
||||
n.stores.loc[stor_extend_i, 'e_nom'] = \
|
||||
n_optim.stores['e_nom_opt'].reindex(stor_extend_i, fill_value=0.)
|
||||
n.stores.loc[stor_extend_i, 'e_nom_extendable'] = False
|
||||
|
||||
return n
|
||||
|
||||
@ -104,15 +109,14 @@ if __name__ == "__main__":
|
||||
n = set_parameters_from_optimized(n, n_optim)
|
||||
del n_optim
|
||||
|
||||
config = snakemake.config
|
||||
opts = snakemake.wildcards.opts.split('-')
|
||||
config['solving']['options']['skip_iterations'] = False
|
||||
snakemake.config['solving']['options']['skip_iterations'] = False
|
||||
|
||||
fn = getattr(snakemake.log, 'memory', None)
|
||||
with memory_logger(filename=fn, interval=30.) as mem:
|
||||
n = prepare_network(n, solve_opts=snakemake.config['solving']['options'])
|
||||
n = solve_network(n, config, solver_dir=tmpdir,
|
||||
solver_log=snakemake.log.solver, opts=opts)
|
||||
n = prepare_network(n, snakemake.config['solving']['options'])
|
||||
n = solve_network(n, snakemake.config, opts, solver_dir=tmpdir,
|
||||
solver_logfile=snakemake.log.solver)
|
||||
n.export_to_netcdf(snakemake.output[0])
|
||||
|
||||
logger.info("Maximum memory usage: {}".format(mem.mem_usage))
|
||||
|
@ -2,7 +2,7 @@
|
||||
#
|
||||
# SPDX-License-Identifier: CC0-1.0
|
||||
|
||||
version: 0.3.0
|
||||
version: 0.4.0
|
||||
tutorial: true
|
||||
logging:
|
||||
level: INFO
|
||||
@ -16,11 +16,15 @@ scenario:
|
||||
clusters: [5]
|
||||
opts: [Co2L-24H]
|
||||
|
||||
countries: ['DE']
|
||||
countries: ['BE']
|
||||
|
||||
clustering:
|
||||
simplify:
|
||||
to_substations: false # network is simplified to nodes with positive or negative power injection (i.e. substations or offwind connections)
|
||||
|
||||
snapshots:
|
||||
start: "2013-03-01"
|
||||
end: "2014-04-01"
|
||||
end: "2013-03-08"
|
||||
closed: 'left' # end is not inclusive
|
||||
|
||||
enable:
|
||||
@ -29,8 +33,6 @@ enable:
|
||||
retrieve_cost_data: true
|
||||
build_cutout: false
|
||||
retrieve_cutout: true
|
||||
build_natura_raster: false
|
||||
retrieve_natura_raster: true
|
||||
custom_busmap: false
|
||||
|
||||
electricity:
|
||||
@ -54,15 +56,15 @@ electricity:
|
||||
atlite:
|
||||
nprocesses: 4
|
||||
cutouts:
|
||||
europe-2013-era5-tutorial:
|
||||
be-03-2013-era5:
|
||||
module: era5
|
||||
x: [4., 15.]
|
||||
y: [46., 56.]
|
||||
time: ["2013-03", "2013-03"]
|
||||
time: ["2013-03-01", "2013-03-08"]
|
||||
|
||||
renewable:
|
||||
onwind:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: wind
|
||||
turbine: Vestas_V112_3MW
|
||||
@ -79,7 +81,7 @@ renewable:
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
offwind-ac:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: wind
|
||||
turbine: NREL_ReferenceTurbine_5MW_offshore
|
||||
@ -91,7 +93,7 @@ renewable:
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
offwind-dc:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: wind
|
||||
turbine: NREL_ReferenceTurbine_5MW_offshore
|
||||
@ -104,7 +106,7 @@ renewable:
|
||||
potential: simple # or conservative
|
||||
clip_p_max_pu: 1.e-2
|
||||
solar:
|
||||
cutout: europe-2013-era5-tutorial
|
||||
cutout: be-03-2013-era5
|
||||
resource:
|
||||
method: pv
|
||||
panel: CSi
|
||||
@ -146,7 +148,6 @@ transformers:
|
||||
type: ''
|
||||
|
||||
load:
|
||||
url: https://data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv
|
||||
power_statistics: True # only for files from <2019; set false in order to get ENTSOE transparency data
|
||||
interpolate_limit: 3 # data gaps up until this size are interpolated linearly
|
||||
time_shift_for_large_gaps: 1w # data gaps up until this size are copied by copying from
|
||||
@ -201,8 +202,8 @@ solving:
|
||||
# threads: 4
|
||||
# lpmethod: 4 # barrier
|
||||
# solutiontype: 2 # non basic solution, ie no crossover
|
||||
# barrier_convergetol: 1.e-5
|
||||
# feasopt_tolerance: 1.e-6
|
||||
# barrier.convergetol: 1.e-5
|
||||
# feasopt.tolerance: 1.e-6
|
||||
|
||||
plotting:
|
||||
map:
|
||||
|
Loading…
Reference in New Issue
Block a user