fix import order

Fabian 2024-01-19 10:47:58 +01:00
parent f876d78ed4
commit b1d21813af
18 changed files with 34 additions and 58 deletions
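
Every hunk below applies the same convention: the import block is kept together and sorted, and module-level statements such as logger = logging.getLogger(__name__) or idx = pd.IndexSlice are moved below the imports instead of sitting between them. A minimal sketch of the resulting layout (an illustration only, not one of the changed files; it merely assumes numpy and pandas are installed):

import logging

import numpy as np
import pandas as pd

# module-level objects come only after the import block
logger = logging.getLogger(__name__)   # moved below the imports in every script
idx = pd.IndexSlice                    # module-level constants follow the logger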

@@ -13,15 +13,15 @@ import os
 import sys
 import time
+from memory_profiler import _get_memory, choose_backend
 logger = logging.getLogger(__name__)
 # TODO: provide alternative when multiprocessing is not available
 try:
     from multiprocessing import Pipe, Process
 except ImportError:
-    from multiprocessing.dummy import Process, Pipe
+    from multiprocessing.dummy import Pipe, Process
-from memory_profiler import _get_memory, choose_backend
 # The memory logging facilities have been adapted from memory_profiler

@@ -8,17 +8,15 @@ Prepares brownfield data from previous planning horizon.
 import logging
-logger = logging.getLogger(__name__)
-import pandas as pd
-idx = pd.IndexSlice
 import numpy as np
+import pandas as pd
 import pypsa
 from _helpers import update_config_with_sector_opts
 from add_existing_baseyear import add_build_year_to_new_assets
+logger = logging.getLogger(__name__)
+idx = pd.IndexSlice
 def add_brownfield(n, n_p, year):
     logger.info(f"Preparing brownfield for the year {year}")

@@ -8,25 +8,20 @@ horizon.
 """
 import logging
-logger = logging.getLogger(__name__)
-import pandas as pd
-idx = pd.IndexSlice
 from types import SimpleNamespace
 import country_converter as coco
 import numpy as np
+import pandas as pd
 import pypsa
 import xarray as xr
 from _helpers import update_config_with_sector_opts
 from add_electricity import sanitize_carriers
 from prepare_sector_network import cluster_heat_buses, define_spatial, prepare_costs
+logger = logging.getLogger(__name__)
 cc = coco.CountryConverter()
+idx = pd.IndexSlice
 spatial = SimpleNamespace()

@@ -9,11 +9,11 @@ using data from JRC ENSPRESO.
 import logging
-logger = logging.getLogger(__name__)
 import geopandas as gpd
 import numpy as np
 import pandas as pd
+logger = logging.getLogger(__name__)
 AVAILABLE_BIOMASS_YEARS = [2010, 2020, 2030, 2040, 2050]

@@ -41,12 +41,13 @@ Outputs
 import logging
-logger = logging.getLogger(__name__)
 import numpy as np
 import pandas as pd
 from _helpers import configure_logging
 from pandas import Timedelta as Delta
+logger = logging.getLogger(__name__)
 def load_timeseries(fn, years, countries, powerstatistics=True):
     """

@@ -7,9 +7,6 @@ Build total energy demands per country using JRC IDEES, eurostat, and EEA data.
 """
 import logging
-logger = logging.getLogger(__name__)
 import multiprocessing as mp
 from functools import partial
@@ -21,7 +18,7 @@ from _helpers import mute_print
 from tqdm import tqdm
 cc = coco.CountryConverter()
+logger = logging.getLogger(__name__)
 idx = pd.IndexSlice

@@ -9,12 +9,12 @@ production sites with data from SciGRID_gas and Global Energy Monitor.
 import logging
-logger = logging.getLogger(__name__)
 import geopandas as gpd
 import pandas as pd
 from cluster_gas_network import load_bus_regions
+logger = logging.getLogger(__name__)
 def read_scigrid_gas(fn):
     df = gpd.read_file(fn)

@@ -9,13 +9,13 @@ Preprocess gas network based on data from bthe SciGRID_gas project
 import logging
-logger = logging.getLogger(__name__)
 import geopandas as gpd
 import pandas as pd
 from pypsa.geo import haversine_pts
 from shapely.geometry import Point
+logger = logging.getLogger(__name__)
 def diameter_to_capacity(pipe_diameter_mm):
     """

@@ -7,9 +7,6 @@ Build spatial distribution of industries from Hotmaps database.
 """
 import logging
-logger = logging.getLogger(__name__)
 import uuid
 from itertools import product
@@ -18,6 +15,7 @@ import geopandas as gpd
 import pandas as pd
 from packaging.version import Version, parse
+logger = logging.getLogger(__name__)
 cc = coco.CountryConverter()

@@ -7,11 +7,8 @@ Build industrial production per country.
 """
 import logging
-from functools import partial
-logger = logging.getLogger(__name__)
 import multiprocessing as mp
+from functools import partial
 import country_converter as coco
 import numpy as np
@@ -19,6 +16,7 @@ import pandas as pd
 from _helpers import mute_print
 from tqdm import tqdm
+logger = logging.getLogger(__name__)
 cc = coco.CountryConverter()
 tj_to_ktoe = 0.0238845

@@ -8,15 +8,14 @@ Build mapping between cutout grid cells and population (total, urban, rural).
 import logging
-logger = logging.getLogger(__name__)
 import atlite
 import geopandas as gpd
 import numpy as np
 import pandas as pd
 import xarray as xr
+logger = logging.getLogger(__name__)
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

@@ -8,14 +8,14 @@ Cluster gas transmission network to clustered model regions.
 import logging
-logger = logging.getLogger(__name__)
 import geopandas as gpd
 import pandas as pd
 from packaging.version import Version, parse
 from pypsa.geo import haversine_pts
 from shapely import wkt
+logger = logging.getLogger(__name__)
 def concat_gdf(gdf_list, crs="EPSG:4326"):
     """

@@ -8,9 +8,6 @@ capacity factors, curtailment, energy balances, prices and other metrics.
 """
 import logging
-logger = logging.getLogger(__name__)
 import sys
 import numpy as np
@@ -19,7 +16,7 @@ import pypsa
 from prepare_sector_network import prepare_costs
 idx = pd.IndexSlice
+logger = logging.getLogger(__name__)
 opt_name = {"Store": "e", "Line": "s", "Transformer": "s"}

@@ -13,8 +13,6 @@ nodes.
 import logging
-logger = logging.getLogger(__name__)
 import cartopy.crs as ccrs
 import geopandas as gpd
 import matplotlib.pyplot as plt
@@ -24,6 +22,7 @@ from make_summary import assign_carriers
 from plot_summary import preferred_order, rename_techs
 from pypsa.plot import add_legend_circles, add_legend_lines, add_legend_patches
+logger = logging.getLogger(__name__)
 plt.style.use(["ggplot"])

@@ -8,12 +8,11 @@ Creates plots from summary CSV files.
 import logging
-logger = logging.getLogger(__name__)
 import matplotlib.gridspec as gridspec
 import matplotlib.pyplot as plt
 import pandas as pd
+logger = logging.getLogger(__name__)
 plt.style.use("ggplot")
 from prepare_sector_network import co2_emissions_year

@@ -11,6 +11,7 @@ import logging
 import os
 import re
 from itertools import product
+from types import SimpleNamespace
 import networkx as nx
 import numpy as np
@@ -22,18 +23,13 @@ from add_electricity import calculate_annuity, sanitize_carriers
 from build_energy_totals import build_co2_totals, build_eea_co2, build_eurostat_co2
 from networkx.algorithms import complement
 from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
+from packaging.version import Version, parse
 from pypsa.geo import haversine_pts
 from pypsa.io import import_components_from_dataframe
 from scipy.stats import beta
-logger = logging.getLogger(__name__)
-from types import SimpleNamespace
 spatial = SimpleNamespace()
+logger = logging.getLogger(__name__)
-from packaging.version import Version, parse
 pd_version = parse(pd.__version__)
 agg_group_kwargs = dict(numeric_only=False) if pd_version >= Version("1.3") else {}

@@ -7,14 +7,13 @@ Retrieve and extract data bundle for sector-coupled studies.
 """
 import logging
-logger = logging.getLogger(__name__)
 import tarfile
 from pathlib import Path
 from _helpers import configure_logging, progress_retrieve, validate_checksum
+logger = logging.getLogger(__name__)
 if __name__ == "__main__":
     if "snakemake" not in globals():
         from _helpers import mock_snakemake

@@ -39,10 +39,10 @@ import xarray as xr
 from _benchmark import memory_logger
 from _helpers import configure_logging, get_opt, update_config_with_sector_opts
 from pypsa.descriptors import get_activity_mask
+from pypsa.descriptors import get_switchable_as_dense as get_as_dense
 logger = logging.getLogger(__name__)
 pypsa.pf.logger.setLevel(logging.WARNING)
-from pypsa.descriptors import get_switchable_as_dense as get_as_dense
 def add_land_use_constraint(n, planning_horizons, config):
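
The reordering shown in these hunks is the kind of normalisation an import sorter performs mechanically. A sketch of checking a single script programmatically, assuming isort >= 5 is installed; the path below is illustrative and not taken from this commit:

from pathlib import Path

import isort

path = Path("scripts/solve_network.py")  # illustrative path, not named in the diff
source = path.read_text()

# check_code() returns True when the import block already satisfies isort's rules;
# code() returns the source with the imports re-sorted, similar to the
# normalisation applied in this commit.
if not isort.check_code(source, show_diff=True):
    path.write_text(isort.code(source))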