Merge branch 'master' into line-rating
This commit is contained in: 8c3db33126

Snakefile: 42 changed lines
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: MIT

 from os.path import normpath, exists
-from shutil import copyfile
+from shutil import copyfile, move

 from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider
 HTTP = HTTPRemoteProvider()
@@ -45,7 +45,7 @@ if config['enable'].get('prepare_links_p_nom', False):
     output: 'data/links_p_nom.csv'
     log: 'logs/prepare_links_p_nom.log'
     threads: 1
-    resources: mem=500
+    resources: mem_mb=500
     script: 'scripts/prepare_links_p_nom.py'


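The rename from mem to mem_mb follows Snakemake's standard resource names, which the scheduler understands without extra configuration. A minimal sketch of the pattern (rule and file names are illustrative, not taken from this repository):

# Sketch: declare memory in megabytes via the standard `mem_mb` resource.
# A global cap can then be enforced at invocation time, e.g.
#   snakemake --cores 4 --resources mem_mb=4000
rule example_rule:
    output: "results/example.txt"
    threads: 1
    resources: mem_mb=500
    shell: "echo done > {output}"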
@@ -70,7 +70,7 @@ if config['enable'].get('retrieve_databundle', True):
 rule retrieve_load_data:
     input: HTTP.remote("data.open-power-system-data.org/time_series/2019-06-05/time_series_60min_singleindex.csv", keep_local=True, static=True)
     output: "data/load_raw.csv"
-    shell: "mv {input} {output}"
+    run: move(input[0], output[0])


 rule build_load_data:
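Replacing shell: "mv {input} {output}" with run: move(input[0], output[0]) keeps the retrieval rules portable, since shutil.move does not depend on a POSIX shell. A minimal self-contained sketch (the remote URL and file names are illustrative):

# Sketch: download a remote file and move it into place with shutil.move
# instead of shelling out to `mv`, so the rule also works e.g. on Windows.
from shutil import move
from snakemake.remote.HTTP import RemoteProvider as HTTPRemoteProvider

HTTP = HTTPRemoteProvider()

rule retrieve_example:
    input: HTTP.remote("example.org/data.csv", keep_local=True, static=True)
    output: "data/example.csv"
    run: move(input[0], output[0])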
@@ -87,7 +87,7 @@ rule build_powerplants:
     output: "resources/powerplants.csv"
     log: "logs/build_powerplants.log"
     threads: 1
-    resources: mem=500
+    resources: mem_mb=500
     script: "scripts/build_powerplants.py"


@@ -108,7 +108,7 @@ rule base_network:
     log: "logs/base_network.log"
     benchmark: "benchmarks/base_network"
     threads: 1
-    resources: mem=500
+    resources: mem_mb=500
     script: "scripts/base_network.py"


@@ -128,7 +128,7 @@ rule build_shapes:
         nuts3_shapes='resources/nuts3_shapes.geojson'
     log: "logs/build_shapes.log"
     threads: 1
-    resources: mem=500
+    resources: mem_mb=500
     script: "scripts/build_shapes.py"


@@ -142,7 +142,7 @@ rule build_bus_regions:
         regions_offshore="resources/regions_offshore.geojson"
     log: "logs/build_bus_regions.log"
     threads: 1
-    resources: mem=1000
+    resources: mem_mb=1000
     script: "scripts/build_bus_regions.py"

 if config['enable'].get('build_cutout', False):
@@ -154,7 +154,7 @@ if config['enable'].get('build_cutout', False):
     log: "logs/build_cutout/{cutout}.log"
     benchmark: "benchmarks/build_cutout_{cutout}"
     threads: ATLITE_NPROCESSES
-    resources: mem=ATLITE_NPROCESSES * 1000
+    resources: mem_mb=ATLITE_NPROCESSES * 1000
     script: "scripts/build_cutout.py"


@@ -162,7 +162,7 @@ if config['enable'].get('retrieve_cutout', True):
 rule retrieve_cutout:
     input: HTTP.remote("zenodo.org/record/4709858/files/{cutout}.nc", keep_local=True, static=True)
     output: "cutouts/{cutout}.nc"
-    shell: "mv {input} {output}"
+    run: move(input[0], output[0])


 if config['enable'].get('build_natura_raster', False):
@@ -179,7 +179,7 @@ if config['enable'].get('retrieve_natura_raster', True):
 rule retrieve_natura_raster:
     input: HTTP.remote("zenodo.org/record/4706686/files/natura.tiff", keep_local=True, static=True)
     output: "resources/natura.tiff"
-    shell: "mv {input} {output}"
+    run: move(input[0], output[0])


 rule build_renewable_profiles:
@@ -200,7 +200,7 @@ rule build_renewable_profiles:
     log: "logs/build_renewable_profile_{technology}.log"
     benchmark: "benchmarks/build_renewable_profiles_{technology}"
     threads: ATLITE_NPROCESSES
-    resources: mem=ATLITE_NPROCESSES * 5000
+    resources: mem_mb=ATLITE_NPROCESSES * 5000
     script: "scripts/build_renewable_profiles.py"


@@ -212,7 +212,7 @@ if 'hydro' in config['renewable'].keys():
         cutout="cutouts/" + config["renewable"]['hydro']['cutout'] + ".nc"
     output: 'resources/profile_hydro.nc'
     log: "logs/build_hydro_profile.log"
-    resources: mem=5000
+    resources: mem_mb=5000
     script: 'scripts/build_hydro_profile.py'

 if config['lines'].get('line_rating', False):
@@ -245,7 +245,7 @@ rule add_electricity:
     log: "logs/add_electricity.log"
     benchmark: "benchmarks/add_electricity"
     threads: 1
-    resources: mem=5000
+    resources: mem_mb=5000
     script: "scripts/add_electricity.py"


@@ -264,7 +264,7 @@ rule simplify_network:
     log: "logs/simplify_network/elec_s{simpl}.log"
     benchmark: "benchmarks/simplify_network/elec_s{simpl}"
     threads: 1
-    resources: mem=4000
+    resources: mem_mb=4000
     script: "scripts/simplify_network.py"


@@ -286,7 +286,7 @@ rule cluster_network:
     log: "logs/cluster_network/elec_s{simpl}_{clusters}.log"
     benchmark: "benchmarks/cluster_network/elec_s{simpl}_{clusters}"
     threads: 1
-    resources: mem=6000
+    resources: mem_mb=6000
     script: "scripts/cluster_network.py"


@@ -298,7 +298,7 @@ rule add_extra_components:
     log: "logs/add_extra_components/elec_s{simpl}_{clusters}.log"
     benchmark: "benchmarks/add_extra_components/elec_s{simpl}_{clusters}_ec"
     threads: 1
-    resources: mem=3000
+    resources: mem_mb=3000
     script: "scripts/add_extra_components.py"


@@ -308,7 +308,7 @@ rule prepare_network:
     log: "logs/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}.log"
     benchmark: "benchmarks/prepare_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
     threads: 1
-    resources: mem=4000
+    resources: mem_mb=4000
     script: "scripts/prepare_network.py"


@@ -339,8 +339,8 @@ rule solve_network:
         memory="logs/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_memory.log"
     benchmark: "benchmarks/solve_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
     threads: 4
-    resources: mem=memory
-    shadow: "shallow"
+    resources: mem_mb=memory
+    shadow: "minimal"
     script: "scripts/solve_network.py"


@@ -355,8 +355,8 @@ rule solve_operations_network:
         memory="logs/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}_op_memory.log"
     benchmark: "benchmarks/solve_operations_network/elec_s{simpl}_{clusters}_ec_l{ll}_{opts}"
     threads: 4
-    resources: mem=(lambda w: 5000 + 372 * int(w.clusters))
-    shadow: "shallow"
+    resources: mem_mb=(lambda w: 5000 + 372 * int(w.clusters))
+    shadow: "minimal"
     script: "scripts/solve_operations_network.py"


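Two details in the solve rules are worth noting: resources accept callables, so memory can scale with a wildcard such as the number of clusters, and shadow: "minimal" symlinks only the rule's declared inputs into the shadow directory, whereas the previous "shallow" linked the top level of the working directory. A sketch with illustrative numbers and names:

# Sketch: memory requirement derived from the {clusters} wildcard, plus a
# minimal shadow directory that isolates the rule from stray files in the
# working directory.
rule solve_example:
    input: "networks/example_{clusters}.nc"
    output: "results/example_{clusters}.nc"
    threads: 4
    resources: mem_mb=lambda w: 5000 + 372 * int(w.clusters)
    shadow: "minimal"
    script: "scripts/solve_example.py"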
@@ -22,6 +22,10 @@ Upcoming Release
   correction factor for solar PV capacity factors by default while satellite data is used.
   A correction factor of 0.854337 is recommended if reanalysis data like ERA5 is used.

+* Resource definitions for memory usage now follow the `Snakemake standard resource definition <https://snakemake.readthedocs.io/en/stable/snakefiles/rules.html#standard-resources>`_ ``mem_mb`` rather than ``mem``.
+
+* Network building is made deterministic by supplying a fixed random state to network clustering routines.
+

 PyPSA-Eur 0.4.0 (22th September 2021)
 =====================================
@@ -26,7 +26,7 @@ dependencies:
   - lxml
   - powerplantmatching>=0.4.8
   - numpy
-  - pandas<1.3
+  - pandas
   - geopandas
   - xarray
   - netcdf4
@@ -245,7 +245,9 @@ def _add_links_from_tyndp(buses, links, links_tyndp, europe_shape):

     links_tyndp.index = "T" + links_tyndp.index.astype(str)

-    return buses, links.append(links_tyndp, sort=True)
+    links = pd.concat([links, links_tyndp], sort=True)
+
+    return buses, links


 def _load_lines_from_eg(buses, eg_lines):
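The .append to pd.concat rewrites here and below anticipate the deprecation of DataFrame.append (deprecated in pandas 1.4 and later removed), which is also why the pandas<1.3 pin can be dropped in the environment. A toy sketch of the equivalent call:

# Sketch with made-up frames: pd.concat replaces the deprecated
# DataFrame.append; keyword arguments such as sort carry over.
import pandas as pd

links = pd.DataFrame({"bus0": ["a"], "bus1": ["b"]}, index=["1"])
links_tyndp = pd.DataFrame({"bus0": ["c"], "bus1": ["d"]}, index=["T1"])

# old: links = links.append(links_tyndp, sort=True)
links = pd.concat([links, links_tyndp], sort=True)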
@@ -558,7 +560,6 @@ def base_network(eg_buses, eg_converters, eg_transformers, eg_lines, eg_links,
     n.name = 'PyPSA-Eur'

     n.set_snapshots(pd.date_range(freq='h', **config['snapshots']))
-    n.snapshot_weightings[:] *= 8760. / n.snapshot_weightings.sum()

     n.import_components_from_dataframe(buses, "Bus")
     n.import_components_from_dataframe(lines, "Line")
@ -91,7 +91,7 @@ def add_custom_powerplants(ppl, custom_powerplants, custom_ppl_query=False):
|
|||||||
dtype={'bus': 'str'})
|
dtype={'bus': 'str'})
|
||||||
if isinstance(custom_ppl_query, str):
|
if isinstance(custom_ppl_query, str):
|
||||||
add_ppls.query(custom_ppl_query, inplace=True)
|
add_ppls.query(custom_ppl_query, inplace=True)
|
||||||
return ppl.append(add_ppls, sort=False, ignore_index=True, verify_integrity=True)
|
return pd.concat([ppl, add_ppls], sort=False, ignore_index=True, verify_integrity=True)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
@@ -206,12 +206,12 @@ if __name__ == '__main__':
     noprogress = not snakemake.config['atlite'].get('show_progress', True)
     config = snakemake.config['renewable'][snakemake.wildcards.technology]
     resource = config['resource'] # pv panel config / wind turbine config
-    correction_factor = snakemake.config.get('correction_factor', 1.)
+    correction_factor = config.get('correction_factor', 1.)
     capacity_per_sqkm = config['capacity_per_sqkm']
-    p_nom_max_meth = snakemake.config.get('potential', 'conservative')
+    p_nom_max_meth = config.get('potential', 'conservative')

     if isinstance(config.get("corine", {}), list):
-        snakemake.config['corine'] = {'grid_codes': config['corine']}
+        config['corine'] = {'grid_codes': config['corine']}

     if correction_factor != 1.:
         logger.info(f'correction_factor is set as {correction_factor}')
@@ -226,7 +226,7 @@ if __name__ == '__main__':
     if config['natura']:
         excluder.add_raster(snakemake.input.natura, nodata=0, allow_no_overlap=True)

-    corine = snakemake.config.get("corine", {})
+    corine = config.get("corine", {})
     if "grid_codes" in corine:
         codes = corine["grid_codes"]
         excluder.add_raster(snakemake.input.corine, codes=codes, invert=True, crs=3035)
@@ -326,11 +326,11 @@ if __name__ == '__main__':
     ds['underwater_fraction'] = xr.DataArray(underwater_fraction, [buses])

     # select only buses with some capacity and minimal capacity factor
-    ds = ds.sel(bus=((ds['profile'].mean('time') > snakemake.config.get('min_p_max_pu', 0.)) &
-                     (ds['p_nom_max'] > snakemake.config.get('min_p_nom_max', 0.))))
+    ds = ds.sel(bus=((ds['profile'].mean('time') > config.get('min_p_max_pu', 0.)) &
+                     (ds['p_nom_max'] > config.get('min_p_nom_max', 0.))))

-    if 'clip_p_max_pu' in snakemake.config:
-        min_p_max_pu = snakemake.config['clip_p_max_pu']
+    if 'clip_p_max_pu' in config:
+        min_p_max_pu = config['clip_p_max_pu']
         ds['profile'] = ds['profile'].where(ds['profile'] >= min_p_max_pu, 0)

     ds.to_netcdf(snakemake.output.profile)
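The changes in these hunks all replace lookups in the top-level snakemake.config with lookups in the per-technology block config = snakemake.config['renewable'][technology], so that settings such as correction_factor, potential, corine and clip_p_max_pu are actually read from where users define them. A sketch with a toy configuration:

# Sketch: per-technology settings live under renewable.<technology>; reading
# them from the top-level config silently falls back to the defaults.
toy_config = {
    "renewable": {
        "onwind": {
            "capacity_per_sqkm": 3,
            "correction_factor": 0.95,
            "potential": "simple",
        }
    }
}

technology = "onwind"
config = toy_config["renewable"][technology]

correction_factor = config.get("correction_factor", 1.)   # 0.95, not 1.0
p_nom_max_meth = config.get("potential", "conservative")   # "simple"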
@@ -169,8 +169,10 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
     swiss = pd.read_excel(ch_popgdp, skiprows=3, index_col=0)
     swiss.columns = swiss.columns.to_series().map(cantons)

-    pop = pop.append(pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':]))
-    gdp = gdp.append(pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':]))
+    swiss_pop = pd.to_numeric(swiss.loc['Residents in 1000', 'CH040':])
+    pop = pd.concat([pop, swiss_pop])
+    swiss_gdp = pd.to_numeric(swiss.loc['Gross domestic product per capita in Swiss francs', 'CH040':])
+    gdp = pd.concat([gdp, swiss_gdp])

     df = df.join(pd.DataFrame(dict(pop=pop, gdp=gdp)))

@@ -194,7 +196,7 @@ def nuts3(country_shapes, nuts3, nuts3pop, nuts3gdp, ch_cantons, ch_popgdp):
     manual['geometry'] = manual['country'].map(country_shapes)
     manual = manual.dropna()

-    df = df.append(manual, sort=False)
+    df = pd.concat([df, manual], sort=False)

     df.loc['ME000', 'pop'] = 650.

@@ -226,6 +226,7 @@ def busmap_for_n_clusters(n, n_clusters, solver_name, focus_weights=None, algori
         algorithm_kwds.setdefault('n_init', 1000)
         algorithm_kwds.setdefault('max_iter', 30000)
         algorithm_kwds.setdefault('tol', 1e-6)
+        algorithm_kwds.setdefault('random_state', 0)

     n.determine_network_topology()

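Setting a default random_state is what makes the k-means busmap reproducible: the keyword arguments collected in algorithm_kwds are ultimately passed to scikit-learn's KMeans, whose centroid initialisation is otherwise randomised. A standalone sketch of the effect (toy coordinates, not network data):

# Sketch: with a fixed random_state the cluster assignment is identical on
# every run; without it, repeated runs may label buses differently.
import numpy as np
from sklearn.cluster import KMeans

points = np.random.RandomState(42).rand(100, 2)  # toy bus coordinates
algorithm_kwds = dict(n_init=1000, max_iter=30000, tol=1e-6, random_state=0)

labels = KMeans(n_clusters=5, **algorithm_kwds).fit_predict(points)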
@@ -124,7 +124,8 @@ def simplify_network_to_380(n):
     several_trafo_b = trafo_map.isin(trafo_map.index)
     trafo_map.loc[several_trafo_b] = trafo_map.loc[several_trafo_b].map(trafo_map)
     missing_buses_i = n.buses.index.difference(trafo_map.index)
-    trafo_map = trafo_map.append(pd.Series(missing_buses_i, missing_buses_i))
+    missing = pd.Series(missing_buses_i, missing_buses_i)
+    trafo_map = pd.concat([trafo_map, missing])

     for c in n.one_port_components|n.branch_components:
         df = n.df(c)