Merge branch 'master' into rename-existing-capacities
commit 6da50bf25a

@@ -67,7 +67,7 @@ repos:
   # Do YAML formatting (before the linter checks it for misses)
   - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-    rev: v2.12.0
+    rev: v2.13.0
     hooks:
       - id: pretty-format-yaml
         args: [--autofix, --indent, "2", --preserve-quotes]

@@ -786,7 +786,6 @@ solving:
       PreDual: 0
       GURO_PAR_BARDENSETHRESH: 200
     gurobi-numeric-focus:
-      name: gurobi
       NumericFocus: 3  # Favour numeric stability over speed
       method: 2  # barrier
       crossover: 0  # do not use crossover
@@ -798,7 +797,6 @@ solving:
       threads: 8
       Seed: 123
     gurobi-fallback:  # Use gurobi defaults
-      name: gurobi
       crossover: 0
       method: 2  # barrier
       BarHomogeneous: 1  # Use homogeneous barrier if standard does not converge
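
The option groups above are eventually handed to the solver when the networks are optimised. As a rough illustration (not part of this commit), a dictionary like `gurobi-numeric-focus` would be forwarded to Gurobi through PyPSA/linopy along these lines; this assumes a working `gurobipy` installation and licence, and the exact call site in the workflow may differ:

import pypsa

n = pypsa.examples.ac_dc_meshed()  # small built-in example network

solver_options = {
    "NumericFocus": 3,    # favour numeric stability over speed
    "method": 2,          # barrier
    "BarHomogeneous": 1,  # homogeneous barrier if the standard one does not converge
}

# PyPSA forwards the keyword arguments to the solver via linopy.
n.optimize(solver_name="gurobi", **solver_options)
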
@@ -10,6 +10,8 @@ Release Notes
 Upcoming Release
 ================
 
+* bugfix: convert Strings to pathlib.Path objects as input to ConfigSettings
+
 * Allow the use of more solvers in clustering (Xpress, COPT, Gurobi, CPLEX, SCIP, MOSEK).
 
 * Enhanced support for choosing different weather years
@@ -162,7 +164,11 @@ Upcoming Release
 
 * Adapt the disabling of transmission expansion in myopic foresight optimisations when limit is already reached to also handle cost limits.
 
-* Fix duplicated years in `add_land_use_constraint_m`.
+* Fix duplicated years and grouping years reference in `add_land_use_constraint_m`.
+
+* Fix type error with `m` option in `cluster_network`.
+
+* Fix error with `symbol` of `buses` in `simplify_network`.
 
 * Fix index of existing capacities in `add_power_capacities_installed_before_baseyear` with `m` option.

@@ -44,7 +44,7 @@ if config["foresight"] != "perfect":
         benchmark:
             (
                 RESULTS
-                + "benchmarksplot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
+                + "benchmarks/plot_power_network/elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}"
             )
         conda:
             "../envs/environment.yaml"

@@ -35,7 +35,8 @@ rule add_existing_baseyear:
         planning_horizons=config["scenario"]["planning_horizons"][0], #only applies to baseyear
     threads: 1
     resources:
-        mem_mb=2000,
+        mem_mb=config_provider("solving", "mem_mb"),
+        runtime=config_provider("solving", "runtime", default="24h"),
     log:
         logs(
             "add_existing_baseyear_elec_s{simpl}_{clusters}_l{ll}_{opts}_{sector_opts}_{planning_horizons}.log"
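
`config_provider` replaces the hard-coded 2000 MB with a value looked up from the (possibly scenario-specific) configuration at job time. The real helper lives in `scripts/_helpers.py` and also handles scenario overrides; a much-simplified stand-in, with placeholder values, to illustrate the callable-returning pattern:

config = {"solving": {"mem_mb": 30000, "runtime": "6h"}}  # placeholder fragment

def config_provider(*keys, default=None):
    # Return a callable; Snakemake evaluates it when the job is scheduled.
    def provider(wildcards):
        value = config
        for key in keys:
            if not isinstance(value, dict) or key not in value:
                return default
            value = value[key]
        return value
    return provider

mem_mb = config_provider("solving", "mem_mb")
runtime = config_provider("solving", "runtime", default="24h")
print(mem_mb(None), runtime(None))  # 30000 6h
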
@@ -43,10 +43,11 @@ def get_scenarios(run):
     scenario_config = run.get("scenarios", {})
     if run["name"] and scenario_config.get("enable"):
         fn = Path(scenario_config["file"])
-        scenarios = yaml.safe_load(fn.read_text())
-        if run["name"] == "all":
-            run["name"] = list(scenarios.keys())
-        return scenarios
+        if fn.exists():
+            scenarios = yaml.safe_load(fn.read_text())
+            if run["name"] == "all":
+                run["name"] = list(scenarios.keys())
+            return scenarios
     return {}
 
 
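
With the new `fn.exists()` guard, a configured but missing scenarios file now falls through to the empty dict instead of failing inside `yaml.safe_load`. A self-contained sketch of the guarded-read pattern (file name and scenario names are hypothetical):

from pathlib import Path

import yaml

fn = Path("config/scenarios.yaml")  # hypothetical location
scenarios = {}
if fn.exists():
    scenarios = yaml.safe_load(fn.read_text()) or {}

run_name = "all"
if run_name == "all":
    run_name = list(scenarios.keys())
print(run_name)  # [] when the file is missing, otherwise the scenario names
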
@@ -106,7 +107,7 @@ def get_run_path(fn, dir, rdir, shared_resources):
     elif isinstance(shared_resources, str):
         rdir = shared_resources + "/"
     elif isinstance(shared_resources, bool):
-        rdir = ""
+        rdir = "" if shared_resources else rdir
     else:
         raise ValueError(
             "shared_resources must be a boolean, str, or 'base' for special handling."
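
Before this change any boolean collapsed `rdir` to the shared directory, so `shared_resources: false` behaved like `true`; the conditional expression keeps the run-specific directory when sharing is disabled. A minimal reproduction of old versus new behaviour (the function below is illustrative, not the one in `_helpers.py`):

def resolve_rdir(shared_resources, rdir="run-a/"):
    if isinstance(shared_resources, str):
        rdir = shared_resources + "/"
    elif isinstance(shared_resources, bool):
        # old behaviour was simply rdir = "", which ignored a False value
        rdir = "" if shared_resources else rdir
    else:
        raise ValueError("shared_resources must be a boolean or str")
    return rdir

print(resolve_rdir(True))    # '' -> shared resources directory
print(resolve_rdir(False))   # 'run-a/' -> kept per run
print(resolve_rdir("base"))  # 'base/'
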
@@ -426,7 +427,7 @@ def mock_snakemake(
         configfiles = [configfiles]
 
     resource_settings = ResourceSettings()
-    config_settings = ConfigSettings(configfiles=configfiles)
+    config_settings = ConfigSettings(configfiles=map(Path, configfiles))
     workflow_settings = WorkflowSettings()
     storage_settings = StorageSettings()
     dag_settings = DAGSettings(rerun_triggers=[])
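
This is the release-note bugfix about converting strings to `pathlib.Path` objects: `ConfigSettings` expects paths, while `mock_snakemake` callers usually pass plain strings. The conversion pattern in isolation (a generic sketch, independent of the Snakemake API):

from pathlib import Path

configfiles = ["config/config.default.yaml"]  # strings, as typically passed in

paths = list(map(Path, configfiles))  # map(Path, ...) also accepts existing Path objects
assert all(isinstance(p, Path) for p in paths)
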
@@ -231,7 +231,7 @@ def distribute_clusters(n, n_clusters, focus_weights=None, solver_name="scip"):
         .pipe(normed)
     )
 
-    N = n.buses.groupby(["country", "sub_network"]).size()
+    N = n.buses.groupby(["country", "sub_network"]).size()[L.index]
 
     assert (
         n_clusters >= len(N) and n_clusters <= N.sum()
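
Selecting with `[L.index]` aligns `N` (number of buses per country and sub-network) with `L` (the normalised load shares computed just above), so the assertion and the later cluster allocation iterate over exactly the same groups. A small pandas sketch of that alignment with made-up data:

import pandas as pd

buses = pd.DataFrame(
    {"country": ["DE", "DE", "DE", "FR"], "sub_network": ["0", "0", "1", "0"]}
)
L = pd.Series(
    [0.6, 0.4],
    index=pd.MultiIndex.from_tuples(
        [("DE", "0"), ("FR", "0")], names=["country", "sub_network"]
    ),
)

N_all = buses.groupby(["country", "sub_network"]).size()       # includes ("DE", "1")
N = buses.groupby(["country", "sub_network"]).size()[L.index]  # only the groups in L
print(N)  # ("DE", "0") -> 2, ("FR", "0") -> 1
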
@@ -454,7 +454,7 @@ if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake
 
-        snakemake = mock_snakemake("cluster_network", simpl="", clusters="5")
+        snakemake = mock_snakemake("cluster_network", simpl="", clusters="40")
     configure_logging(snakemake)
     set_scenario_config(snakemake)
 
||||
@ -471,7 +471,7 @@ if __name__ == "__main__":
|
||||
conventional_carriers = set(params.conventional_carriers)
|
||||
if snakemake.wildcards.clusters.endswith("m"):
|
||||
n_clusters = int(snakemake.wildcards.clusters[:-1])
|
||||
aggregate_carriers = params.conventional_carriers & aggregate_carriers
|
||||
aggregate_carriers = conventional_carriers & aggregate_carriers
|
||||
elif snakemake.wildcards.clusters.endswith("c"):
|
||||
n_clusters = int(snakemake.wildcards.clusters[:-1])
|
||||
aggregate_carriers = aggregate_carriers - conventional_carriers
|
||||
|
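
This is the type-error fix for the `m` wildcard from the release notes: `params.conventional_carriers` is a plain list, and `list & set` is not defined, so the intersection now uses the `set` built two lines above. The failure mode in isolation:

conventional = ["coal", "lignite", "CCGT"]  # list, as it arrives from params
aggregate = {"CCGT", "solar", "onwind"}

try:
    conventional & aggregate  # old expression
except TypeError as e:
    print(e)  # unsupported operand type(s) for &: 'list' and 'set'

print(set(conventional) & aggregate)  # {'CCGT'}
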
@@ -594,18 +594,6 @@ if __name__ == "__main__":
         )
         busmaps.append(busmap_hac)
 
-    if snakemake.wildcards.simpl:
-        n, cluster_map = cluster(
-            n,
-            int(snakemake.wildcards.simpl),
-            params.focus_weights,
-            solver_name,
-            params.simplify_network["algorithm"],
-            params.simplify_network["feature"],
-            params.aggregation_strategies,
-        )
-        busmaps.append(cluster_map)
-
     # some entries in n.buses are not updated in previous functions, therefore can be wrong. as they are not needed
     # and are lost when clustering (for example with the simpl wildcard), we remove them for consistency:
     remove = [
@@ -621,6 +609,18 @@ if __name__ == "__main__":
     n.buses.drop(remove, axis=1, inplace=True, errors="ignore")
     n.lines.drop(remove, axis=1, errors="ignore", inplace=True)
 
+    if snakemake.wildcards.simpl:
+        n, cluster_map = cluster(
+            n,
+            int(snakemake.wildcards.simpl),
+            params.focus_weights,
+            solver_name,
+            params.simplify_network["algorithm"],
+            params.simplify_network["feature"],
+            params.aggregation_strategies,
+        )
+        busmaps.append(cluster_map)
+
     update_p_nom_max(n)
 
     n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards)))
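
Moving the `simpl`-wildcard clustering after the column clean-up means it now runs on buses whose stale columns have already been dropped; the list of collected busmaps is unchanged. For orientation, each busmap is a pandas Series mapping old to new bus names, and the collected maps are later composed into a single mapping. A generic sketch of that composition with two toy busmaps (the exact composition call in the script may differ):

from functools import reduce

import pandas as pd

busmap_a = pd.Series({"bus1": "c1", "bus2": "c1", "bus3": "c2"})  # first clustering step
busmap_b = pd.Series({"c1": "z1", "c2": "z1"})                    # second clustering step

combined = reduce(lambda x, y: x.map(y), [busmap_a, busmap_b])
print(combined)  # bus1, bus2 and bus3 all end up in cluster "z1"
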
@@ -155,7 +155,7 @@ def _add_land_use_constraint(n):
 def _add_land_use_constraint_m(n, planning_horizons, config):
     # if generators clustering is lower than network clustering, land_use accounting is at generators clusters
 
-    grouping_years = config["existing_capacities"]["grouping_years"]
+    grouping_years = config["existing_capacities"]["grouping_years_power"]
     current_horizon = snakemake.wildcards.planning_horizons
 
     for carrier in ["solar", "onwind", "offwind-ac", "offwind-dc"]:
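
The renamed key reflects the split of the former `grouping_years` list into power- and heat-specific lists under `existing_capacities`; this function only needs the power grouping years. A minimal lookup against a config fragment shaped like the default configuration (the years are placeholders):

config = {
    "existing_capacities": {
        "grouping_years_power": [1980, 1990, 2000, 2010, 2020],  # placeholder values
        "grouping_years_heat": [1980, 1990, 2000, 2010, 2020],
    }
}

existing = config["existing_capacities"]
assert "grouping_years" not in existing            # the old, unsplit key is gone
grouping_years = existing["grouping_years_power"]  # power-specific list used here
print(grouping_years)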