[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
This commit is contained in:
pre-commit-ci[bot] 2023-01-06 17:46:04 +00:00
parent 1ebe53cdac
commit acc6ee6bfe
5 changed files with 81 additions and 44 deletions

View File

@@ -92,11 +92,11 @@ atlite:
# base:
# module: era5
europe-2013-era5:
module: era5 # in priority order
module: era5 # in priority order
dx: 0.3
dy: 0.3
europe-2013-sarah:
module: [sarah, era5] # in priority order
module: [sarah, era5] # in priority order
dx: 0.2
dy: 0.2
sarah_interpolate: false

View File

@@ -265,9 +265,13 @@ def manual_adjustment(load, fn_load, powerstatistics, countries):
load, "BG", "2018-10-27 21:00", "2018-10-28 22:00", Delta(weeks=1)
)
if 'UA' in countries:
copy_timeslice(load, 'UA', '2013-01-25 14:00', '2013-01-28 21:00', Delta(weeks=1))
copy_timeslice(load, 'UA', '2013-10-28 03:00', '2013-10-28 20:00', Delta(weeks=1))
if "UA" in countries:
copy_timeslice(
load, "UA", "2013-01-25 14:00", "2013-01-28 21:00", Delta(weeks=1)
)
copy_timeslice(
load, "UA", "2013-10-28 03:00", "2013-10-28 20:00", Delta(weeks=1)
)
return load
@@ -291,14 +295,14 @@ if __name__ == "__main__":
load = load_timeseries(snakemake.input[0], years, countries, powerstatistics)
# attach load of UA (best data only for entsoe transparency)
load_ua = load_timeseries(snakemake.input[0], '2018', ['UA'], False)
load_ua = load_timeseries(snakemake.input[0], "2018", ["UA"], False)
snapshot_year = str(snapshots.year.unique().item())
time_diff = pd.Timestamp('2018') - pd.Timestamp(snapshot_year)
load_ua.index -= time_diff # hack indices (currently, UA is manually set to 2018)
load['UA'] = load_ua
time_diff = pd.Timestamp("2018") - pd.Timestamp(snapshot_year)
load_ua.index -= time_diff # hack indices (currently, UA is manually set to 2018)
load["UA"] = load_ua
# attach load of MD (no time-series available, use 2020-totals and distribute according to UA):
# https://www.iea.org/data-and-statistics/data-browser/?country=MOLDOVA&fuel=Energy%20consumption&indicator=TotElecCons
load['MD'] = 6.2e6*(load_ua/load_ua.sum())
load["MD"] = 6.2e6 * (load_ua / load_ua.sum())
if snakemake.config["load"]["manual_adjustments"]:
load = manual_adjustment(load, snakemake.input[0], powerstatistics, countries)

View File

@@ -288,9 +288,11 @@ if __name__ == "__main__":
# For Moldova and Ukraine: Overwrite parts not covered by Corine with
# externally determined available areas
if "availability_matrix_MD_UA" in snakemake.input.keys():
availability_MDUA = xr.open_dataarray(snakemake.input["availability_matrix_MD_UA"])
availability_MDUA = xr.open_dataarray(
snakemake.input["availability_matrix_MD_UA"]
)
availability.loc[availability_MDUA.coords] = availability_MDUA
area = cutout.grid.to_crs(3035).area / 1e6
area = xr.DataArray(
area.values.reshape(cutout.shape), [cutout.coords["y"], cutout.coords["x"]]

View File

@@ -22,91 +22,120 @@
"\n",
"logger = logging.getLogger(__name__)\n",
"\n",
"if __name__ == '__main__':\n",
" if 'snakemake' not in globals():\n",
"if __name__ == \"__main__\":\n",
" if \"snakemake\" not in globals():\n",
" from _helpers import mock_snakemake\n",
" snakemake = mock_snakemake('determine_availability_matrix_MD_UA', technology='solar')\n",
"\n",
" snakemake = mock_snakemake(\n",
" \"determine_availability_matrix_MD_UA\", technology=\"solar\"\n",
" )\n",
" configure_logging(snakemake)\n",
" pgb.streams.wrap_stderr()\n",
"\n",
" nprocesses = snakemake.config['atlite'].get('nprocesses')\n",
" noprogress = not snakemake.config['atlite'].get('show_progress', True)\n",
" config = snakemake.config['renewable'][snakemake.wildcards.technology]\n",
" nprocesses = snakemake.config[\"atlite\"].get(\"nprocesses\")\n",
" noprogress = not snakemake.config[\"atlite\"].get(\"show_progress\", True)\n",
" config = snakemake.config[\"renewable\"][snakemake.wildcards.technology]\n",
"\n",
" cutout = atlite.Cutout(snakemake.input['cutout'])\n",
" regions = gpd.read_file(snakemake.input.regions).set_index('name').rename_axis('bus')\n",
" cutout = atlite.Cutout(snakemake.input[\"cutout\"])\n",
" regions = (\n",
" gpd.read_file(snakemake.input.regions).set_index(\"name\").rename_axis(\"bus\")\n",
" )\n",
" buses = regions.index\n",
"\n",
" excluder = atlite.ExclusionContainer(crs=3035, res=100)\n",
"\n",
" corine = config.get(\"corine\", {})\n",
" if \"grid_codes\" in corine:\n",
" \n",
"\n",
" # Land cover codes to emulate CORINE results\n",
" if snakemake.wildcards.technology == \"solar\":\n",
" codes = [20, 30, 40, 50, 60, 90, 100]\n",
" elif snakemake.wildcards.technology == \"onwind\":\n",
" codes = [20, 30, 40, 60, 100, 111, 112, 113, 114, 115, 116, 121, 122, 123, 124, 125, 126]\n",
" codes = [\n",
" 20,\n",
" 30,\n",
" 40,\n",
" 60,\n",
" 100,\n",
" 111,\n",
" 112,\n",
" 113,\n",
" 114,\n",
" 115,\n",
" 116,\n",
" 121,\n",
" 122,\n",
" 123,\n",
" 124,\n",
" 125,\n",
" 126,\n",
" ]\n",
" elif snakemake.wildcards.technology == \"offshore-ac\":\n",
" codes = [80, 200]\n",
" elif snakemake.wildcards.technology == \"offshore-dc\":\n",
" codes = [80, 200]\n",
" else:\n",
" assert False, \"technology not supported\"\n",
" \n",
" excluder.add_raster(snakemake.input.copernicus, codes=codes, invert=True, crs=\"EPSG:4326\")\n",
" if \"distance\" in corine and corine.get(\"distance\", 0.) > 0.:\n",
"\n",
" excluder.add_raster(\n",
" snakemake.input.copernicus, codes=codes, invert=True, crs=\"EPSG:4326\"\n",
" )\n",
" if \"distance\" in corine and corine.get(\"distance\", 0.0) > 0.0:\n",
" # Land cover codes to emulate CORINE results\n",
" if snakemake.wildcards.technology == \"onwind\":\n",
" codes = [50]\n",
" else:\n",
" assert False, \"technology not supported\"\n",
" \n",
"\n",
" buffer = corine[\"distance\"]\n",
" excluder.add_raster(snakemake.input.copernicus, codes=codes, buffer=buffer, crs=\"EPSG:4326\")\n",
" excluder.add_raster(\n",
" snakemake.input.copernicus, codes=codes, buffer=buffer, crs=\"EPSG:4326\"\n",
" )\n",
"\n",
" if \"max_depth\" in config:\n",
" # lambda not supported for atlite + multiprocessing\n",
" # use named function np.greater with partially frozen argument instead\n",
" # and exclude areas where: -max_depth > grid cell depth\n",
" func = functools.partial(np.greater,-config['max_depth'])\n",
" func = functools.partial(np.greater, -config[\"max_depth\"])\n",
" excluder.add_raster(snakemake.input.gebco, codes=func, crs=4236, nodata=-1000)\n",
"\n",
" if 'min_shore_distance' in config:\n",
" buffer = config['min_shore_distance']\n",
" if \"min_shore_distance\" in config:\n",
" buffer = config[\"min_shore_distance\"]\n",
" excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer)\n",
"\n",
" if 'max_shore_distance' in config:\n",
" buffer = config['max_shore_distance']\n",
" excluder.add_geometry(snakemake.input.country_shapes, buffer=buffer, invert=True)\n",
" if \"max_shore_distance\" in config:\n",
" buffer = config[\"max_shore_distance\"]\n",
" excluder.add_geometry(\n",
" snakemake.input.country_shapes, buffer=buffer, invert=True\n",
" )\n",
"\n",
" kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress)\n",
" if noprogress:\n",
" logger.info('Calculate landuse availabilities...')\n",
" logger.info(\"Calculate landuse availabilities...\")\n",
" start = time.time()\n",
" availability = cutout.availabilitymatrix(regions, excluder, **kwargs)\n",
" duration = time.time() - start\n",
" logger.info(f'Completed availability calculation ({duration:2.2f}s)')\n",
" logger.info(f\"Completed availability calculation ({duration:2.2f}s)\")\n",
" else:\n",
" availability = cutout.availabilitymatrix(regions, excluder, **kwargs)\n",
"\n",
" # Limit results only to buses for UA and MD\n",
" buses = regions.loc[regions[\"country\"].isin([\"UA\",\"MD\"])].index.values\n",
" buses = regions.loc[regions[\"country\"].isin([\"UA\", \"MD\"])].index.values\n",
" availability = availability.sel(bus=buses)\n",
" \n",
"\n",
" # Save and plot for verification\n",
" availability.to_netcdf(snakemake.output[\"availability_matrix\"])\n",
" #availability.sum(dim=\"bus\").plot()\n",
" #plt.title(technology)\n",
" #plt.show()"
" # availability.sum(dim=\"bus\").plot()\n",
" # plt.title(technology)\n",
" # plt.show()"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"display_name": "",
"language": "python",
"name": "python3"
"name": ""
},
"language_info": {
"codemirror_mode": {

View File

@@ -395,8 +395,10 @@ def simplify_links(n, costs, config, output, aggregation_strategies=dict()):
def remove_stubs(n, costs, config, output, aggregation_strategies=dict()):
logger.info("Removing stubs")
across_borders = config["clustering"]["simplify_network"].get("remove_stubs_across_borders", True)
matching_attrs = [] if across_borders else ['country']
across_borders = config["clustering"]["simplify_network"].get(
"remove_stubs_across_borders", True
)
matching_attrs = [] if across_borders else ["country"]
busmap = busmap_by_stubs(n, matching_attrs)
connection_costs_to_bus = _compute_connection_costs_to_bus(n, busmap, costs, config)