{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import pypsa\n", "import numpy as np\n", "import pandas as pd\n", "import os\n", "from pathlib import Path\n", "import matplotlib.pyplot as plt\n", "\n", "plt.style.use(\"ggplot\")\n", "import pycountry\n", "import json\n", "import warnings\n", "\n", "warnings.filterwarnings(\"ignore\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "available_models = {\n", " \"model_1\": \"elec_s_37_ec_lv1.0_.nc\",\n", " \"model_2\": \"elec_s_37_ec_lv1.0_3H_withUC.nc\",\n", " \"model_3\": \"elec_s_37_ec_lv1.0_Co2L-noUC-noCo2price.nc\",\n", " \"model_4\": \"elec_s_37_ec_lv1.0_Ep.nc\",\n", " \"model_5\": \"elec_s_37_ec_lv1.0_Ep_new.nc\",\n", "}\n", "\n", "data_path = Path.cwd() / \"..\" / \"..\"\n", "model_path = data_path / available_models[\"model_5\"]\n", "\n", "with open(data_path / \"generation_data\" / \"generation_mapper_pypsa.json\", \"r\") as f:\n", " pypsa_generation_mapper = json.load(f)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "n = pypsa.Network(str(model_path))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def intersection(alist, blist):\n", " for val in alist:\n", " if val not in blist:\n", " alist.remove(val)\n", " return alist" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "pypsa_generation_mapper" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "countries = set([col[:2] for col in n.generators_t.p.columns])\n", "gen = set([col[6:] for col in n.generators_t.p.columns])\n", "\n", "for i, country in enumerate(countries):\n", " df = pd.DataFrame(index=n.generators_t.p.index)\n", " # country_generation = [col for col in n.generators_t.p.columns if col.startswith(country)]\n", " country_generation = n.generators.loc[n.generators.bus.str.contains(country)]\n", "\n", " for key, gens in pypsa_generation_mapper.items():\n", " # curr_gen = country_generation.loc[\n", " # (country_generation.carrier.str.contains(tech) for tech in gens).astype(bool)].index\n", " curr_gen = country_generation.loc[\n", " country_generation.carrier.apply(lambda carr: carr in gens)\n", " ].index\n", "\n", " if len(curr_gen):\n", " df[key] = n.generators_t.p[curr_gen].mean(axis=1)\n", " else:\n", " df[key] = np.zeros(len(df))\n", "\n", " df.to_csv(data_path / \"pypsa_data\" / (country + \".csv\"))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import seaborn as sns\n", "\n", "\n", "for num, country in enumerate(os.listdir(data_path / \"pypsa_data\")):\n", " # country = \"DE.csv\"\n", " cc = country[:2]\n", "\n", " pypsa_df = pd.read_csv(\n", " data_path / \"pypsa_data\" / country, parse_dates=True, index_col=0\n", " )\n", " try:\n", " entsoe_df = pd.read_csv(\n", " data_path / \"harmonised_generation_data\" / (\"prepared_\" + country),\n", " parse_dates=True,\n", " index_col=0,\n", " )\n", "\n", " entsoe_df.columns = [col[:-6] for col in entsoe_df.columns]\n", " except FileNotFoundError:\n", " continue\n", "\n", " fig, axs = plt.subplots(3, 3, figsize=(20, 15))\n", "\n", " axs[0, 0].set_title(pycountry.countries.get(alpha_2=country[:2]).name)\n", "\n", " start = pd.Timestamp(\"2019-01-01\") # for small time frame\n", " end = pd.Timestamp(\"2019-01-14\")\n", " coarse_freq = \"d\"\n", "\n", " num_techs_shown = 6\n", "\n", " links = n.links.loc[\n", " (n.links.bus0.str.contains(cc) + n.links.bus1.str.contains(cc)).astype(bool)\n", " ]\n", " links = links.loc[links.carrier == \"DC\"].sum(axis=1)\n", "\n", " from_here = n.links.loc[links.index].bus0.str.contains(cc)\n", " to_here = n.links.loc[links.index].bus1.str.contains(cc)\n", "\n", " pypsa_df[\"Import Export\"] = pd.concat(\n", " (n.links_t.p0[from_here.index], n.links_t.p0[to_here.index]), axis=1\n", " ).sum(axis=1)\n", "\n", " # show_techs = pypsa_df.sum().sort_values(ascending=False).iloc[:num_techs_shown].index.tolist()\n", " show_techs = (\n", " entsoe_df.sum()\n", " .sort_values(ascending=False)\n", " .iloc[:num_techs_shown]\n", " .index.tolist()\n", " )\n", "\n", " entsoe_df[intersection(show_techs, entsoe_df.columns.tolist())].loc[start:end].plot(\n", " ax=axs[0, 0]\n", " )\n", " pypsa_df[show_techs].loc[start:end].plot(ax=axs[0, 1], legend=False)\n", "\n", " pypsa_load = n.loads_t.p_set\n", " pypsa_load = pypsa_load[\n", " [col for col in pypsa_load.columns if col.startswith(country[:2])]\n", " ].mean(axis=1)\n", "\n", " pypsa_load.loc[start:end].plot(ax=axs[0, 2])\n", "\n", " axs[0, 0].set_ylabel(\"ENTSOE Generation\")\n", " axs[0, 1].set_ylabel(\"PyPSA Generation\")\n", " axs[0, 2].set_ylabel(\"PyPSA Load\")\n", "\n", " upper_lim = pd.concat((pypsa_df, entsoe_df), axis=0).max().max()\n", " for ax in axs[0, :2]:\n", " ax.set_ylim(0, upper_lim)\n", "\n", " for ax in axs[0, :2]:\n", " ax.legend()\n", "\n", " # entsoe_df[[col+\" (MWh)\" for col in pypsa_df.columns]].loc[start:end].plot(ax=axs[0])\n", " entsoe_df[intersection(show_techs, entsoe_df.columns.tolist())].resample(\n", " coarse_freq\n", " ).mean().plot(ax=axs[1, 0])\n", " pypsa_df[show_techs].resample(coarse_freq).mean().plot(ax=axs[1, 1], legend=False)\n", "\n", " pypsa_load = n.loads_t.p_set\n", " pypsa_load = pypsa_load[\n", " [col for col in pypsa_load.columns if col.startswith(country[:2])]\n", " ].mean(axis=1)\n", " pypsa_load.resample(coarse_freq).sum().plot(ax=axs[1, 2])\n", "\n", " # pypsa_df[show_techs].resample(coarse_freq).mean().sum(axis=1).plot(ax=axs[1,2], legend=False)\n", "\n", " axs[1, 0].set_ylabel(\"ENTSOE Generation\")\n", " axs[1, 1].set_ylabel(\"PyPSA Generation\")\n", " axs[1, 2].set_ylabel(\"PyPSA Load\")\n", "\n", " upper_lim = (\n", " pd.concat(\n", " (\n", " pypsa_df.resample(coarse_freq).mean(),\n", " entsoe_df.resample(coarse_freq).mean(),\n", " ),\n", " axis=0,\n", " )\n", " .max()\n", " .max()\n", " )\n", " for ax in axs[1, :2]:\n", " ax.set_ylim(0, upper_lim)\n", "\n", " for ax in axs[1, :2]:\n", " ax.legend()\n", "\n", " try:\n", " entsoe_prices = pd.read_csv(\n", " data_path / \"price_data\" / country,\n", " index_col=0,\n", " parse_dates=True,\n", " )\n", "\n", " def make_tz_time(time):\n", " return pd.Timestamp(time).tz_convert(\"utc\")\n", "\n", " entsoe_prices.index = pd.Series(entsoe_prices.index).apply(\n", " lambda time: make_tz_time(time)\n", " )\n", " entsoe_prices.resample(\"3d\").mean().plot(ax=axs[2, 0])\n", "\n", " except FileNotFoundError:\n", " pass\n", "\n", " prices_col = [\n", " col for col in n.buses_t.marginal_price.columns if col.startswith(country[:2])\n", " ]\n", " pypsa_prices = n.buses_t.marginal_price[prices_col].mean(axis=1)\n", " pypsa_prices.resample(\"3d\").mean().plot(ax=axs[2, 1])\n", "\n", " upper_lim = pd.concat((entsoe_prices, pypsa_prices), axis=0).max().max()\n", " for ax in axs[2, :2]:\n", " ax.set_ylim(0, upper_lim)\n", "\n", " axs[2, 0].set_ylabel(\"ENTSOE Day Ahead Prices\")\n", " axs[2, 1].set_ylabel(\"PyPSA Shadow Prices\")\n", "\n", " pypsa_totals = pypsa_df.sum()\n", "\n", " entsoe_totals = entsoe_df.sum()\n", " totals = pd.DataFrame(index=pypsa_totals.index)\n", "\n", " totals[\"pypsa\"] = pypsa_totals\n", " totals[\"entsoe\"] = entsoe_totals\n", " entsoe_totals.loc[\"Import Export\"] = 0.0\n", " totals[\"tech\"] = totals.index\n", "\n", " totals = pd.concat(\n", " [\n", " pd.DataFrame(\n", " {\n", " \"kind\": [\"pypsa\" for _ in range(len(pypsa_totals))],\n", " \"tech\": pypsa_totals.index,\n", " \"total generation\": pypsa_totals.values,\n", " }\n", " ),\n", " pd.DataFrame(\n", " {\n", " \"kind\": [\"entsoe\" for _ in range(len(entsoe_totals))],\n", " \"tech\": entsoe_totals.index,\n", " \"total generation\": entsoe_totals.values,\n", " }\n", " ),\n", " ],\n", " axis=0,\n", " )\n", "\n", " sns.barplot(\n", " data=totals,\n", " x=\"tech\",\n", " y=\"total generation\",\n", " hue=\"kind\",\n", " ax=axs[2, 2],\n", " palette=\"dark\",\n", " alpha=0.6,\n", " edgecolor=\"k\",\n", " )\n", " axs[2, 2].set_ylabel(\"Total Generation\")\n", " axs[2, 2].set_xticks(\n", " axs[2, 2].get_xticks(), axs[2, 2].get_xticklabels(), rotation=45, ha=\"right\"\n", " )\n", "\n", " plt.tight_layout()\n", " plt.show()\n", "\n", " if num == 7:\n", " break\n", "\n", "\n", "# df.to_csv(data_path / \"pypsa_data\" / (country+\".csv\"))" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "links = n.links.loc[\n", " (n.links.bus0.str.contains(\"DE\") + n.links.bus1.str.contains(\"DE\")).astype(bool)\n", "]\n", "links = links.loc[links.carrier == \"DC\"].sum(axis=1)\n", "n.links_t.p0[links.index.tolist()].resample(\"w\").sum().plot()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "german_origin = n.links.loc[links.index].bus0.str.contains(\"DE\")\n", "german_destin = n.links.loc[links.index].bus1.str.contains(\"DE\")\n", "\n", "net_impexp = pd.concat(\n", " (n.links_t.p0[german_origin.index], n.links_t.p0[german_destin.index]), axis=1\n", ").sum(axis=1)\n", "\n", "net_impexp.iloc[:200].plot()" ] } ], "metadata": { "kernelspec": { "display_name": "", "language": "python", "name": "" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.0" }, "orig_nbformat": 4 }, "nbformat": 4, "nbformat_minor": 2 }