Merge branch 'master' into validation
commit 269a08006f
.github/workflows/ci.yaml (53 changed lines)
@@ -19,7 +19,6 @@ on:
   - cron: "0 5 * * TUE"
 
 env:
-  CONDA_CACHE_NUMBER: 1 # Change this value to manually reset the environment cache
   DATA_CACHE_NUMBER: 2
 
 jobs:
@@ -27,22 +26,12 @@ jobs:
     strategy:
       fail-fast: false
       max-parallel: 3
       matrix:
-        include:
-          # Matrix required to handle caching with Mambaforge
-          - os: ubuntu-latest
-            label: ubuntu-latest
-            prefix: /usr/share/miniconda3/envs/pypsa-eur
-
-          - os: macos-latest
-            label: macos-latest
-            prefix: /Users/runner/miniconda3/envs/pypsa-eur
-
-          - os: windows-latest
-            label: windows-latest
-            prefix: C:\Miniconda3\envs\pypsa-eur
-
-    name: ${{ matrix.label }}
+        os:
+        - ubuntu-latest
+        - macos-latest
+        - windows-latest
 
     runs-on: ${{ matrix.os }}
@@ -60,24 +49,25 @@
     - name: Add solver to environment
       run: |
         echo -e "- glpk\n- ipopt<3.13.3" >> envs/environment.yaml
-      if: ${{ matrix.label }} == 'windows-latest'
+      if: ${{ matrix.os }} == 'windows-latest'
 
     - name: Add solver to environment
       run: |
         echo -e "- glpk\n- ipopt" >> envs/environment.yaml
-      if: ${{ matrix.label }} != 'windows-latest'
+      if: ${{ matrix.os }} != 'windows-latest'
 
-    - name: Setup Mambaforge
-      uses: conda-incubator/setup-miniconda@v2
+    - name: Setup micromamba
+      uses: mamba-org/setup-micromamba@v1
       with:
-        miniforge-variant: Mambaforge
-        miniforge-version: latest
-        activate-environment: pypsa-eur
-        use-mamba: true
+        micromamba-version: latest
+        environment-file: envs/environment.yaml
+        log-level: debug
+        init-shell: bash
+        cache-environment: true
+        cache-downloads: true
 
     - name: Set cache dates
       run: |
-        echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
         echo "WEEK=$(date +'%Y%U')" >> $GITHUB_ENV
 
     - name: Cache data and cutouts folders
@@ -88,21 +78,8 @@
           cutouts
         key: data-cutouts-${{ env.WEEK }}-${{ env.DATA_CACHE_NUMBER }}
 
-    - name: Create environment cache
-      uses: actions/cache@v3
-      id: cache
-      with:
-        path: ${{ matrix.prefix }}
-        key: ${{ matrix.label }}-conda-${{ env.DATE }}-${{ env.CONDA_CACHE_NUMBER }}
-
-    - name: Update environment due to outdated or unavailable cache
-      run: mamba env update -n pypsa-eur -f envs/environment.yaml
-      if: steps.cache.outputs.cache-hit != 'true'
-
     - name: Test snakemake workflow
       run: |
-        conda activate pypsa-eur
-        conda list
         snakemake -call solve_elec_networks --configfile config/test/config.electricity.yaml --rerun-triggers=mtime
         snakemake -call all --configfile config/test/config.overnight.yaml --rerun-triggers=mtime
         snakemake -call all --configfile config/test/config.myopic.yaml --rerun-triggers=mtime
@@ -87,6 +87,6 @@ repos:
 
 # Check for FSFE REUSE compliance (licensing)
 - repo: https://github.com/fsfe/reuse-tool
-  rev: v2.0.0
+  rev: v2.1.0
   hooks:
   - id: reuse
@@ -539,7 +539,7 @@ industry:
 # docs in https://pypsa-eur.readthedocs.io/en/latest/configuration.html#costs
 costs:
   year: 2030
-  version: v0.5.0
+  version: v0.6.0
   rooftop_share: 0.14 # based on the potentials, assuming (0.1 kW/m2 and 10 m2/person)
   fill_values:
     FOM: 0
@@ -224,7 +224,10 @@ The included ``.nc`` files are PyPSA network files which can be imported with PyPSA via:
 
     n = pypsa.Network(filename)
 
+Operating Systems
+=================
+
+The PyPSA-Eur workflow is continuously tested for Linux, macOS and Windows (WSL only).
 
 .. toctree::
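Aside: a self-contained version of the documented snippet above, with the missing import added. The file name is only a placeholder, not a file shipped with this commit.

    import pypsa

    # placeholder path for one of the included ``.nc`` network files
    filename = "elec_s_6.nc"

    # import the network and run a couple of quick sanity checks
    n = pypsa.Network(filename)
    print(n.buses.shape)
    print(n.generators.p_nom.sum())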
@@ -280,13 +280,13 @@ def add_EQ_constraints(n, o, scaling=1e-1):
     float_regex = "[0-9]*\.?[0-9]+"
     level = float(re.findall(float_regex, o)[0])
     if o[-1] == "c":
-        ggrouper = n.generators.bus.map(n.buses.country).to_xarray()
-        lgrouper = n.loads.bus.map(n.buses.country).to_xarray()
-        sgrouper = n.storage_units.bus.map(n.buses.country).to_xarray()
+        ggrouper = n.generators.bus.map(n.buses.country)
+        lgrouper = n.loads.bus.map(n.buses.country)
+        sgrouper = n.storage_units.bus.map(n.buses.country)
     else:
-        ggrouper = n.generators.bus.to_xarray()
-        lgrouper = n.loads.bus.to_xarray()
-        sgrouper = n.storage_units.bus.to_xarray()
+        ggrouper = n.generators.bus
+        lgrouper = n.loads.bus
+        sgrouper = n.storage_units.bus
     load = (
         n.snapshot_weightings.generators
         @ n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
@@ -300,7 +300,7 @@ def add_EQ_constraints(n, o, scaling=1e-1):
     p = n.model["Generator-p"]
     lhs_gen = (
         (p * (n.snapshot_weightings.generators * scaling))
-        .groupby(ggrouper)
+        .groupby(ggrouper.to_xarray())
         .sum()
         .sum("snapshot")
     )
@@ -309,7 +309,7 @@ def add_EQ_constraints(n, o, scaling=1e-1):
         spillage = n.model["StorageUnit-spill"]
         lhs_spill = (
             (spillage * (-n.snapshot_weightings.stores * scaling))
-            .groupby(sgrouper)
+            .groupby(sgrouper.to_xarray())
             .sum()
             .sum("snapshot")
         )
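Aside: read together, the three hunks above keep the groupers as pandas Series (so the pandas groupby over n.loads_t.p_set still works) and convert them with .to_xarray() only where the linopy expressions are grouped. A minimal sketch of that pattern on a toy network; the two-bus setup and all names below are illustrative and not part of this commit.

    import pandas as pd
    import pypsa

    # toy two-country network, just to have Generator-p variables and loads
    n = pypsa.Network()
    n.set_snapshots(pd.date_range("2023-01-01", periods=3, freq="h"))
    for bus, country in [("b1", "DE"), ("b2", "FR")]:
        n.add("Bus", bus, country=country)
        n.add("Generator", f"gen-{bus}", bus=bus, p_nom_extendable=True)
        n.add("Load", f"load-{bus}", bus=bus, p_set=pd.Series(1.0, index=n.snapshots))

    n.optimize.create_model()  # build the linopy model without solving
    scaling = 1e-1

    # pandas Series groupers: generator/load -> country
    ggrouper = n.generators.bus.map(n.buses.country)
    lgrouper = n.loads.bus.map(n.buses.country)

    # pandas side: aggregate the load time series per country
    load = (
        n.snapshot_weightings.generators
        @ n.loads_t.p_set.groupby(lgrouper, axis=1).sum()
    )

    # linopy side: group the optimisation variable with the xarray version
    p = n.model["Generator-p"]
    lhs_gen = (
        (p * (n.snapshot_weightings.generators * scaling))
        .groupby(ggrouper.to_xarray())
        .sum()
        .sum("snapshot")
    )
    print(load, lhs_gen, sep="\n")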