From 8d6cf2b5881681fe8f2deb07b41ec47d697669ef Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Mon, 21 Oct 2024 14:06:24 +0100 Subject: [PATCH 01/36] update comment in conda lock creation Github action (#3788) --- .github/workflows/create-condalock-file.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/create-condalock-file.yml b/.github/workflows/create-condalock-file.yml index 4ae10de3e2..7babd2a456 100644 --- a/.github/workflows/create-condalock-file.yml +++ b/.github/workflows/create-condalock-file.yml @@ -36,7 +36,8 @@ jobs: conda --version # setup-miniconda@v3 installs an old conda and mamba # forcing a modern mamba updates both mamba and conda - # pin <2 due to https://github.com/ESMValGroup/ESMValTool/pull/3771 + # unpin mamba after conda-lock=3 release + # see github.com/ESMValGroup/ESMValTool/issues/3782 conda install -c conda-forge "mamba>=1.4.8,<2" conda config --show-sources conda config --show From 5009b478df6888e9c7b3957ca1fd2a25bb5697ac Mon Sep 17 00:00:00 2001 From: max-anu <137736464+max-anu@users.noreply.github.com> Date: Tue, 22 Oct 2024 09:49:55 +1100 Subject: [PATCH 02/36] Adding pr, tauu, tauv NOAA-CIRES-20CR-V2 CMORISER (#3763) Co-authored-by: Max Proft Co-authored-by: Felicity Chun <32269066+flicj191@users.noreply.github.com> --- doc/sphinx/source/input.rst | 2 +- .../data/cmor_config/NOAA-CIRES-20CR-V2.yml | 18 ++++++++++++++++++ esmvaltool/cmorizers/data/datasets.yml | 4 +++- .../downloaders/datasets/noaa_cires_20cr_v2.py | 8 +++++++- .../data/formatters/datasets/ncep_ncar_r1.py | 3 +++ .../recipes/examples/recipe_check_obs.yml | 3 +++ 6 files changed, 35 insertions(+), 3 deletions(-) diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index 65aef57cd8..f3562c2507 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -410,7 +410,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol 
+------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NIWA-BS | toz, tozStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut (Amon) | 2 | Python | +| NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut, pr, tauu, tauv (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NOAA-CIRES-20CR-V3 | clt, clwvi, hus, prw, rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml index 7591e99257..faded8f9d6 100644 --- a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml @@ -44,3 +44,21 @@ variables: mip: Amon raw: uswrf file: 'uswrf.ntat.mon.mean.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 'prate.mon.mean.nc' + tauu_month: + short_name: tauu + mip: Amon + raw: uflx + file: 'uflx.mon.mean.nc' + make_negative: true + tauv_month: + short_name: tauv + mip: Amon + raw: vflx + file: 'vflx.mon.mean.nc' + make_negative: true + diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml index 8fcb6adc21..508b18ccec 100644 --- a/esmvaltool/cmorizers/data/datasets.yml +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -1054,7 +1054,9 @@ datasets: gaussian/monolevel/tcdc.eatm.mon.mean.nc gaussian/monolevel/ulwrf.ntat.mon.mean.nc 
gaussian/monolevel/uswrf.ntat.mon.mean.nc - + gaussian/monolevel/prate.mon.mean.nc + gaussian/monolevel/uflx.mon.mean.nc + gaussian/monolevel/vflx.mon.mean.nc NOAA-CIRES-20CR-V3: tier: 2 source: ftp.cdc.noaa.gov/Projects/20thC_ReanV3/Monthlies/ diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py index fb2d733f06..bbbd708293 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py @@ -34,7 +34,7 @@ def download_dataset(config, dataset, dataset_info, start_date, end_date, ) downloader.connect() - downloader.set_cwd("Projects/20thC_ReanV2/Monthlies/") + downloader.set_cwd("/Projects/20thC_ReanV2/Monthlies/") downloader.download_file("monolevel/cldwtr.eatm.mon.mean.nc", sub_folder='surface') downloader.download_file("monolevel/pr_wtr.eatm.mon.mean.nc", @@ -47,3 +47,9 @@ def download_dataset(config, dataset, dataset_info, start_date, end_date, sub_folder='surface_gauss') downloader.download_file("gaussian/monolevel/uswrf.ntat.mon.mean.nc", sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/prate.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/uflx.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/vflx.mon.mean.nc", + sub_folder='surface_gauss') diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py b/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py index a74938be86..c0f33286d5 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py @@ -143,6 +143,9 @@ def _extract_variable(short_name, var, cfg, raw_filepath, out_dir): cube = _fix_coordinates(cube, definition, cmor_info) + if var.get("make_negative"): + cube.data = -1 * cube.data + 
utils.save_variable( cube, short_name, diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml index fd08dcadbc..8c7ba0a382 100644 --- a/esmvaltool/recipes/examples/recipe_check_obs.yml +++ b/esmvaltool/recipes/examples/recipe_check_obs.yml @@ -714,6 +714,9 @@ diagnostics: prw: rlut: rsut: + pr: + tauu: + tauv: additional_datasets: - {dataset: NOAA-CIRES-20CR-V2, project: OBS6, mip: Amon, tier: 2, type: reanaly, version: v2, start_year: 1871, end_year: 2012} From 8f7982c96a6b4dfe7809f70f9d8a075a3ba76809 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Tue, 22 Oct 2024 16:39:42 +0200 Subject: [PATCH 03/36] Adapt ESMValTool to new configuration (#3761) Co-authored-by: Bouwe Andela --- .circleci/config.yml | 4 +- config-user-example.yml | 274 ------------------ doc/sphinx/source/community/dataset.rst | 9 +- doc/sphinx/source/community/diagnostic.rst | 23 +- .../detailed_release_procedure.rst | 4 +- doc/sphinx/source/community/upgrading.rst | 6 +- doc/sphinx/source/develop/dataset.rst | 35 +-- doc/sphinx/source/faq.rst | 15 +- doc/sphinx/source/functionalities.rst | 8 +- doc/sphinx/source/input.rst | 12 +- .../source/quickstart/configuration.rst | 15 +- doc/sphinx/source/quickstart/output.rst | 33 ++- doc/sphinx/source/quickstart/running.rst | 8 +- .../source/recipes/recipe_carvalhais14nat.rst | 32 +- doc/sphinx/source/recipes/recipe_climwip.rst | 8 +- doc/sphinx/source/recipes/recipe_gier20bg.rst | 14 +- .../source/recipes/recipe_hydrology.rst | 8 +- .../source/recipes/recipe_ipccwg1ar6ch3.rst | 28 +- doc/sphinx/source/recipes/recipe_kcs.rst | 4 +- .../recipes/recipe_model_evaluation.rst | 6 +- doc/sphinx/source/recipes/recipe_monitor.rst | 6 +- doc/sphinx/source/recipes/recipe_oceans.rst | 12 +- doc/sphinx/source/recipes/recipe_rainfarm.rst | 4 +- .../source/recipes/recipe_shapeselect.rst | 2 +- .../source/recipes/recipe_wenzel14jgr.rst | 4 +- 
.../source/recipes/recipe_wenzel16nat.rst | 19 +- doc/sphinx/source/utils.rst | 9 +- esmvaltool/cmorizers/data/cmorizer.py | 115 ++++++-- esmvaltool/cmorizers/data/datasets.yml | 34 +-- .../download_scripts/download_era_interim.py | 9 +- .../data/downloaders/datasets/jra_55.py | 2 - .../downloaders/datasets/noaa_ersstv3b.py | 1 + .../data/downloaders/datasets/noaa_ersstv5.py | 1 + .../downloaders/datasets/nsidc_g02202_sh.py | 1 + .../data/formatters/datasets/ct2019.py | 2 +- .../data/formatters/datasets/merra.ncl | 7 +- .../data/formatters/datasets/mls_aura.py | 2 +- .../diag_scripts/kcs/local_resampling.py | 4 +- .../diag_scripts/monitor/compute_eofs.py | 4 +- esmvaltool/diag_scripts/monitor/monitor.py | 4 +- .../diag_scripts/monitor/multi_datasets.py | 4 +- .../russell18jgr/russell18jgr-fig6a.ncl | 6 +- .../russell18jgr/russell18jgr-fig6b.ncl | 13 +- .../russell18jgr/russell18jgr-fig7i.ncl | 9 +- .../russell18jgr/russell18jgr-fig9c.ncl | 9 +- esmvaltool/interface_scripts/logging.ncl | 6 +- .../recipes/examples/recipe_extract_shape.yml | 2 +- .../hydrology/recipe_hydro_forcing.yml | 4 +- .../recipes/hydrology/recipe_lisflood.yml | 3 +- .../recipes/hydrology/recipe_marrmot.yml | 3 +- .../recipe_ipccwg1ar6ch3_fig_3_42_a.yml | 2 +- esmvaltool/recipes/recipe_carvalhais14nat.yml | 2 +- esmvaltool/recipes/recipe_runoff_et.yml | 2 +- .../recipes/recipe_sea_surface_salinity.yml | 5 +- esmvaltool/recipes/recipe_shapeselect.yml | 3 +- esmvaltool/utils/batch-jobs/generate.py | 16 +- tests/integration/test_cmorizer.py | 69 ++++- tests/integration/test_diagnostic_run.py | 61 +++- 58 files changed, 462 insertions(+), 545 deletions(-) delete mode 100644 config-user-example.yml diff --git a/.circleci/config.yml b/.circleci/config.yml index eb13a0ef08..82492e724f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -216,8 +216,8 @@ jobs: conda activate esmvaltool mkdir -p ~/climate_data esmvaltool config get_config_user - echo "search_esgf: when_missing" >> 
~/.esmvaltool/config-user.yml - cat ~/.esmvaltool/config-user.yml + echo "search_esgf: when_missing" >> ~/.config/esmvaltool/config-user.yml + cat ~/.config/esmvaltool/config-user.yml for recipe in esmvaltool/recipes/testing/recipe_*.yml; do esmvaltool run "$recipe" done diff --git a/config-user-example.yml b/config-user-example.yml deleted file mode 100644 index c102928db9..0000000000 --- a/config-user-example.yml +++ /dev/null @@ -1,274 +0,0 @@ -############################################################################### -# Example user configuration file for ESMValTool -############################################################################### -# -# Note for users: -# -------------- -# Site-specific entries for different HPC centers are given at the bottom of -# this file. Comment out/replace as needed. This default version of the file -# can be used in combination with the command line argument -# ``search_esgf=when_missing``. If only certain values are allowed for an -# option, these are listed after ``---``. The option in square brackets is the -# default value, i.e., the one that is used if this option is omitted in the -# file. -# -############################################################################### -# -# Note for developers: -# ------------------- -# Two identical copies of this file (``ESMValTool/config-user-example.yml`` and -# ``ESMValCore/esmvalcore/config-user.yml``) exist. If you change one of it, -# make sure to apply the changes to the other. -# -############################################################################### ---- - -# Destination directory where all output will be written -# Includes log files and performance stats. -output_dir: ~/esmvaltool_output - -# Auxiliary data directory -# Used by some recipes to look for additional datasets. 
-auxiliary_data_dir: ~/auxiliary_data - -# Automatic data download from ESGF --- [never]/when_missing/always -# Use automatic download of missing CMIP3, CMIP5, CMIP6, CORDEX, and obs4MIPs -# data from ESGF. ``never`` disables this feature, which is useful if you are -# working on a computer without an internet connection, or if you have limited -# disk space. ``when_missing`` enables the automatic download for files that -# are not available locally. ``always`` will always check ESGF for the latest -# version of a file, and will only use local files if they correspond to that -# latest version. -search_esgf: never - -# Directory for storing downloaded climate data -# Make sure to use a directory where you can store multiple GBs of data. Your -# home directory on a HPC is usually not suited for this purpose, so please -# change the default value in this case! -download_dir: ~/climate_data - -# Run at most this many tasks in parallel --- [null]/1/2/3/4/... -# Set to ``null`` to use the number of available CPUs. If you run out of -# memory, try setting max_parallel_tasks to ``1`` and check the amount of -# memory you need for that by inspecting the file ``run/resource_usage.txt`` in -# the output directory. Using the number there you can increase the number of -# parallel tasks again to a reasonable number for the amount of memory -# available in your system. -max_parallel_tasks: null - -# Log level of the console --- debug/[info]/warning/error -# For much more information printed to screen set log_level to ``debug``. -log_level: info - -# Exit on warning --- true/[false] -# # Only used in NCL diagnostic scripts. -exit_on_warning: false - -# Plot file format --- [png]/pdf/ps/eps/epsi -output_file_type: png - -# Remove the ``preproc`` directory if the run was successful --- [true]/false -# By default this option is set to ``true``, so all preprocessor output files -# will be removed after a successful run. Set to ``false`` if you need those -# files. 
-remove_preproc_dir: true - -# Use netCDF compression --- true/[false] -compress_netcdf: false - -# Save intermediary cubes in the preprocessor --- true/[false] -# Setting this to ``true`` will save the output cube from each preprocessing -# step. These files are numbered according to the preprocessing order. -save_intermediary_cubes: false - -# Path to custom ``config-developer.yml`` file -# This can be used to customise project configurations. See -# ``config-developer.yml`` for an example. Set to ``null`` to use the default. -config_developer_file: null - -# Use a profiling tool for the diagnostic run --- [false]/true -# A profiler tells you which functions in your code take most time to run. -# Only available for Python diagnostics. -profile_diagnostic: false - -# Rootpaths to the data from different projects -# This default setting will work if files have been downloaded by ESMValTool -# via ``search_esgf``. Lists are also possible. For site-specific entries and -# more examples, see below. Comment out these when using a site-specific path. -rootpath: - default: ~/climate_data - -# Directory structure for input data --- [default]/ESGF/BADC/DKRZ/ETHZ/etc. -# This default setting will work if files have been downloaded by ESMValTool -# via ``search_esgf``. See ``config-developer.yml`` for definitions. Comment -# out/replace as per needed. -drs: - CMIP3: ESGF - CMIP5: ESGF - CMIP6: ESGF - CORDEX: ESGF - obs4MIPs: ESGF - -# Example rootpaths and directory structure that showcases the different -# projects and also the use of lists -# For site-specific entries, see below. 
-#rootpath: -# CMIP3: [~/cmip3_inputpath1, ~/cmip3_inputpath2] -# CMIP5: [~/cmip5_inputpath1, ~/cmip5_inputpath2] -# CMIP6: [~/cmip6_inputpath1, ~/cmip6_inputpath2] -# OBS: ~/obs_inputpath -# OBS6: ~/obs6_inputpath -# obs4MIPs: ~/obs4mips_inputpath -# ana4mips: ~/ana4mips_inputpath -# native6: ~/native6_inputpath -# RAWOBS: ~/rawobs_inputpath -# default: ~/default_inputpath -#drs: -# CMIP3: default -# CMIP5: default -# CMIP6: default -# CORDEX: default -# obs4MIPs: default - -# Directory tree created by automatically downloading from ESGF -# Uncomment the lines below to locate data that has been automatically -# downloaded from ESGF (using ``search_esgf``). -#rootpath: -# CMIP3: ~/climate_data -# CMIP5: ~/climate_data -# CMIP6: ~/climate_data -# CORDEX: ~/climate_data -# obs4MIPs: ~/climate_data -#drs: -# CMIP3: ESGF -# CMIP5: ESGF -# CMIP6: ESGF -# CORDEX: ESGF -# obs4MIPs: ESGF - -# Site-specific entries: JASMIN -# Uncomment the lines below to locate data on JASMIN. -#auxiliary_data_dir: /gws/nopw/j04/esmeval/aux_data/AUX -#rootpath: -# CMIP6: /badc/cmip6/data/CMIP6 -# CMIP5: /badc/cmip5/data/cmip5/output1 -# CMIP3: /badc/cmip3_drs/data/cmip3/output -# OBS: /gws/nopw/j04/esmeval/obsdata-v2 -# OBS6: /gws/nopw/j04/esmeval/obsdata-v2 -# obs4MIPs: /gws/nopw/j04/esmeval/obsdata-v2 -# ana4mips: /gws/nopw/j04/esmeval/obsdata-v2 -# CORDEX: /badc/cordex/data/CORDEX/output -#drs: -# CMIP6: BADC -# CMIP5: BADC -# CMIP3: BADC -# CORDEX: BADC -# OBS: default -# OBS6: default -# obs4MIPs: default -# ana4mips: default - -# Site-specific entries: DKRZ-Levante -# For bd0854 members a shared download directory is available -#search_esgf: when_missing -#download_dir: /work/bd0854/DATA/ESMValTool2/download -# Uncomment the lines below to locate data on Levante at DKRZ. 
-#auxiliary_data_dir: /work/bd0854/DATA/ESMValTool2/AUX -#rootpath: -# CMIP6: /work/bd0854/DATA/ESMValTool2/CMIP6_DKRZ -# CMIP5: /work/bd0854/DATA/ESMValTool2/CMIP5_DKRZ -# CMIP3: /work/bd0854/DATA/ESMValTool2/CMIP3 -# CORDEX: /work/ik1017/C3SCORDEX/data/c3s-cordex/output -# OBS: /work/bd0854/DATA/ESMValTool2/OBS -# OBS6: /work/bd0854/DATA/ESMValTool2/OBS -# obs4MIPs: /work/bd0854/DATA/ESMValTool2/OBS -# ana4mips: /work/bd0854/DATA/ESMValTool2/OBS -# native6: /work/bd0854/DATA/ESMValTool2/RAWOBS -# RAWOBS: /work/bd0854/DATA/ESMValTool2/RAWOBS -#drs: -# CMIP6: DKRZ -# CMIP5: DKRZ -# CMIP3: DKRZ -# CORDEX: BADC -# obs4MIPs: default -# ana4mips: default -# OBS: default -# OBS6: default -# native6: default - -# Site-specific entries: ETHZ -# Uncomment the lines below to locate data at ETHZ. -#rootpath: -# CMIP6: /net/atmos/data/cmip6 -# CMIP5: /net/atmos/data/cmip5 -# CMIP3: /net/atmos/data/cmip3 -# OBS: /net/exo/landclim/PROJECTS/C3S/datadir/obsdir/ -#drs: -# CMIP6: ETHZ -# CMIP5: ETHZ -# CMIP3: ETHZ - -# Site-specific entries: IPSL -# Uncomment the lines below to locate data on Ciclad at IPSL. -#rootpath: -# IPSLCM: / -# CMIP5: /bdd/CMIP5/output -# CMIP6: /bdd/CMIP6 -# CMIP3: /bdd/CMIP3 -# CORDEX: /bdd/CORDEX/output -# obs4MIPs: /bdd/obs4MIPS/obs-CFMIP/observations -# ana4mips: /not_yet -# OBS: /not_yet -# OBS6: /not_yet -# RAWOBS: /not_yet -#drs: -# CMIP6: DKRZ -# CMIP5: DKRZ -# CMIP3: IPSL -# CORDEX: BADC -# obs4MIPs: IPSL -# ana4mips: default -# OBS: not_yet -# OBS6: not_yet - -# Site-specific entries: Met Office -# Uncomment the lines below to locate data at the Met Office. 
-#rootpath: -# CMIP5: /project/champ/data/cmip5/output1 -# CMIP6: /project/champ/data/CMIP6 -# CORDEX: /project/champ/data/cordex/output -# OBS: /data/users/esmval/ESMValTool/obs -# OBS6: /data/users/esmval/ESMValTool/obs -# obs4MIPs: /data/users/esmval/ESMValTool/obs -# ana4mips: /project/champ/data/ana4MIPs -# native6: /data/users/esmval/ESMValTool/rawobs -# RAWOBS: /data/users/esmval/ESMValTool/rawobs -#drs: -# CMIP5: BADC -# CMIP6: BADC -# CORDEX: BADC -# OBS: default -# OBS6: default -# obs4MIPs: default -# ana4mips: BADC -# native6: default - -# Site-specific entries: NCI -# Uncomment the lines below to locate data at NCI. -#rootpath: -# CMIP6: [/g/data/oi10/replicas/CMIP6, /g/data/fs38/publications/CMIP6, /g/data/xp65/public/apps/esmvaltool/replicas/CMIP6] -# CMIP5: [/g/data/r87/DRSv3/CMIP5, /g/data/al33/replicas/CMIP5/combined, /g/data/rr3/publications/CMIP5/output1, /g/data/xp65/public/apps/esmvaltool/replicas/cmip5/output1] -# CMIP3: /g/data/r87/DRSv3/CMIP3 -# OBS: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# OBS6: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# obs4MIPs: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# ana4mips: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# native6: /g/data/xp65/public/apps/esmvaltool/native6 -# -#drs: -# CMIP6: NCI -# CMIP5: NCI -# CMIP3: NCI -# CORDEX: ESGF -# obs4MIPs: default -# ana4mips: default diff --git a/doc/sphinx/source/community/dataset.rst b/doc/sphinx/source/community/dataset.rst index 424d4d4694..7a24e7c923 100644 --- a/doc/sphinx/source/community/dataset.rst +++ b/doc/sphinx/source/community/dataset.rst @@ -42,14 +42,15 @@ and run the recipe, to make sure the CMOR checks pass without warnings or errors To test a pull request for a new CMORizer script: -#. Download the data following the instructions included in the script and place - it in the ``RAWOBS`` path specified in your ``config-user.yml`` +#. 
Download the data following the instructions included in the script and + place it in the ``RAWOBS`` ``rootpath`` specified in your + :ref:`configuration ` #. If available, use the downloading script by running ``esmvaltool data download --config_file `` #. Run the cmorization by running ``esmvaltool data format `` #. Copy the resulting data to the ``OBS`` (for CMIP5 compliant data) or ``OBS6`` - (for CMIP6 compliant data) path specified in your - ``config-user.yml`` + (for CMIP6 compliant data) ``rootpath`` specified in your + :ref:`configuration ` #. Run ``recipes/examples/recipe_check_obs.yml`` with the new dataset to check that the data can be used diff --git a/doc/sphinx/source/community/diagnostic.rst b/doc/sphinx/source/community/diagnostic.rst index 285815f7cf..1be820f7b8 100644 --- a/doc/sphinx/source/community/diagnostic.rst +++ b/doc/sphinx/source/community/diagnostic.rst @@ -64,7 +64,7 @@ If it is just a few simple scripts or packaging is not possible (i.e. for NCL) y and paste the source code into the ``esmvaltool/diag_scripts`` directory. If you have existing code in a compiled language like -C, C++, or Fortran that you want to re-use, the recommended way to proceed is to add Python bindings and publish +C, C++, or Fortran that you want to reuse, the recommended way to proceed is to add Python bindings and publish the package on PyPI so it can be installed as a Python dependency. You can then call the functions it provides using a Python diagnostic. @@ -134,9 +134,8 @@ Diagnostic output Typically, diagnostic scripts create plots, but any other output such as e.g. text files or tables is also possible. Figures should be saved in the ``plot_dir``, either in both ``.pdf`` and -``.png`` format (preferred), or -respect the ``output_file_type`` specified in the -:ref:`esmvalcore:user configuration file`. +``.png`` format (preferred), or respect the :ref:`configuration option +` ``output_file_type`` . 
Data should be saved in the ``work_dir``, preferably as a ``.nc`` (`NetCDF `__) file, following the `CF-Conventions `__ as much as possible. @@ -181,7 +180,7 @@ human inspection. In addition to provenance information, a caption is also added to the plots. Provenance information from the recipe is automatically recorded by ESMValCore, whereas -diagnostic scripts must include code specifically to record provenance. See below for +diagnostic scripts must include code specifically to record provenance. See below for documentation of provenance attributes that can be included in a recipe. When contributing a diagnostic, please make sure it records the provenance, and that no warnings related to provenance are generated when running the recipe. @@ -252,7 +251,7 @@ for example plot_types: errorbar: error bar plot -To use these items, include them in the provenance record dictionary in the form +To use these items, include them in the provenance record dictionary in the form :code:`key: [value]` i.e. for the example above as :code:`'plot_types': ['errorbar']`. @@ -275,8 +274,8 @@ Always use :func:`esmvaltool.diag_scripts.shared.run_diagnostic` at the end of y with run_diagnostic() as config: main(config) -Create a ``provenance_record`` for each diagnostic file (i.e. image or data -file) that the diagnostic script outputs. The ``provenance_record`` is a +Create a ``provenance_record`` for each diagnostic file (i.e. image or data +file) that the diagnostic script outputs. The ``provenance_record`` is a dictionary of provenance items, for example: .. code-block:: python @@ -296,15 +295,15 @@ dictionary of provenance items, for example: 'statistics': ['mean'], } -To save a matplotlib figure, use the convenience function -:func:`esmvaltool.diag_scripts.shared.save_figure`. Similarly, to save Iris cubes use +To save a matplotlib figure, use the convenience function +:func:`esmvaltool.diag_scripts.shared.save_figure`. 
Similarly, to save Iris cubes use :func:`esmvaltool.diag_scripts.shared.save_data`. Both of these functions take ``provenance_record`` as an argument and log the provenance accordingly. Have a look at the example Python diagnostic in `esmvaltool/diag_scripts/examples/diagnostic.py `_ for a complete example. -For any other files created, you will need to make use of a +For any other files created, you will need to make use of a :class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance. Include the following code directly after the file is saved: @@ -489,7 +488,7 @@ This includes the following items: * In-code documentation (comments, docstrings) * Code quality (e.g. no hardcoded pathnames) * No Codacy errors reported -* Re-use of existing functions whenever possible +* Reuse of existing functions whenever possible * Provenance implemented Run recipe diff --git a/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst b/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst index a73643f454..d0d7f74672 100644 --- a/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst +++ b/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst @@ -49,7 +49,7 @@ and attach it in the release testing issue; to record the environment in a yaml Modifications to configuration files need to be documented as well. To test recipes, it is recommended to only use the default options and DKRZ data directories, simply by uncommenting -the DKRZ-Levante block of a newly generated ``config-user.yml`` file. +the DKRZ-Levante block of a :ref:`newly generated configuration file `. Submit run scripts - test recipe runs ------------------------------------- @@ -61,7 +61,7 @@ You will have to set the name of your environment, your email address (if you wa More information on running jobs with SLURM on DKRZ/Levante can be found in the DKRZ `documentation `_. 
-You can also specify the path to your ``config-user.yml`` file where ``max_parallel_tasks`` can be set. The script was found to work well with ``max_parallel_tasks=8``. Some recipes need to be run with ``max_parallel_tasks=1`` (large memory requirements, CMIP3 data, diagnostic issues, ...). These recipes are listed in `ONE_TASK_RECIPES`. +You can also specify the path to your configuration directory where ``max_parallel_tasks`` can be set in a YAML file. The script was found to work well with ``max_parallel_tasks=8``. Some recipes need to be run with ``max_parallel_tasks=1`` (large memory requirements, CMIP3 data, diagnostic issues, ...). These recipes are listed in `ONE_TASK_RECIPES`. Some recipes need other job requirements, you can add their headers in the `SPECIAL_RECIPES` dictionary. Otherwise the header will be written following the template that is written in the lines below. If you want to exclude recipes, you can do so by uncommenting the `exclude` lines. diff --git a/doc/sphinx/source/community/upgrading.rst b/doc/sphinx/source/community/upgrading.rst index 9ed7f8b5b1..9a9b37f178 100644 --- a/doc/sphinx/source/community/upgrading.rst +++ b/doc/sphinx/source/community/upgrading.rst @@ -145,7 +145,7 @@ Many operations previously performed by the diagnostic scripts, are now included The backend operations are fully controlled by the ``preprocessors`` section in the recipe. Here, a number of preprocessor sets can be defined, with different options for each of the operations. The sets defined in this section are applied in the ``diagnostics`` section to preprocess a given variable. -It is recommended to proceed step by step, porting and testing each operation separately before proceeding with the next one. 
A useful setting in the user configuration file (``config-private.yml``) called ``write_intermediary_cube`` allows writing out the variable field after each preprocessing step, thus facilitating the comparison with the old version (e.g., after CMORization, level selection, after regridding, etc.). The CMORization step of the new backend exactly corresponds to the operation performed by the old backend (and stored in the ``climo`` directory, now called ``preprec``): this is the very first step to be checked, by simply comparing the intermediary file produced by the new backend after CMORization with the output of the old backend in the ``climo`` directorsy (see "Testing" below for instructions). +It is recommended to proceed step by step, porting and testing each operation separately before proceeding with the next one. A useful setting in the configuration called ``write_intermediary_cube`` allows writing out the variable field after each preprocessing step, thus facilitating the comparison with the old version (e.g., after CMORization, level selection, after regridding, etc.). The CMORization step of the new backend exactly corresponds to the operation performed by the old backend (and stored in the ``climo`` directory, now called ``preprec``): this is the very first step to be checked, by simply comparing the intermediary file produced by the new backend after CMORization with the output of the old backend in the ``climo`` directorsy (see "Testing" below for instructions). The new backend also performs variable derivation, replacing the ``calculate`` function in the ``variable_defs`` scripts. If the recipe which is being ported makes use of derived variables, the corresponding calculation must be ported from the ``./variable_defs/.ncl`` file to ``./esmvaltool/preprocessor/_derive.py``. 
@@ -159,7 +159,7 @@ In the new version, all settings are centralized in the recipe, completely repla Make sure the diagnostic script writes NetCDF output ====================================================== -Each diagnostic script is required to write the output of the anaylsis in one or more NetCDF files. This is to give the user the possibility to further look into the results, besides the plots, but (most importantly) for tagging purposes when publishing the data in a report and/or on a website. +Each diagnostic script is required to write the output of the analysis in one or more NetCDF files. This is to give the user the possibility to further look into the results, besides the plots, but (most importantly) for tagging purposes when publishing the data in a report and/or on a website. For each of the plot produced by the diagnostic script a single NetCDF file has to be generated. The variable saved in this file should also contain all the necessary metadata that documents the plot (dataset names, units, statistical methods, etc.). The files have to be saved in the work directory (defined in `cfg['work_dir']` and `config_user_info@work_dir`, for the python and NCL diagnostics, respectively). @@ -209,7 +209,7 @@ Before submitting a pull request, the code should be cleaned to adhere to the co Update the documentation ======================== -If necessary, add or update the documentation for your recipes in the corrsponding rst file, which is now in ``doc\sphinx\source\recipes``. Do not forget to also add the documentation file to the list in ``doc\sphinx\source\annex_c`` to make sure it actually appears in the documentation. +If necessary, add or update the documentation for your recipes in the corresponding rst file, which is now in ``doc\sphinx\source\recipes``. Do not forget to also add the documentation file to the list in ``doc\sphinx\source\annex_c`` to make sure it actually appears in the documentation. 
Open a pull request =================== diff --git a/doc/sphinx/source/develop/dataset.rst b/doc/sphinx/source/develop/dataset.rst index f3c168a17c..f624a44feb 100644 --- a/doc/sphinx/source/develop/dataset.rst +++ b/doc/sphinx/source/develop/dataset.rst @@ -76,7 +76,7 @@ for downloading (e.g. providing contact information, licence agreements) and using the observations. The unformatted (raw) observations should then be stored in the appropriate of these three folders. -For each additional dataset, an entry needs to be made to the file +For each additional dataset, an entry needs to be made to the file `datasets.yml `_. The dataset entry should contain: @@ -92,10 +92,10 @@ of the cmorizing script (see Section `4. Create a cmorizer for the dataset`_). 3.1 Downloader script (optional) -------------------------------- -A Python script can be written to download raw observations +A Python script can be written to download raw observations from source and store the data in the appropriate tier subdirectory of the folder ``RAWOBS`` automatically. -There are many downloading scripts available in +There are many downloading scripts available in `/esmvaltool/cmorizers/data/downloaders/datasets/ `_ where several data download mechanisms are provided: @@ -108,18 +108,18 @@ Note that the name of this downloading script has to be identical to the name of the dataset. Depending on the source server, the downloading script needs to contain paths to -raw observations, filename patterns and various necessary fields to retrieve +raw observations, filename patterns and various necessary fields to retrieve the data. -Default ``start_date`` and ``end_date`` can be provided in cases where raw data +Default ``start_date`` and ``end_date`` can be provided in cases where raw data are stored in daily, monthly, and yearly files. The downloading script for the given dataset can be run with: .. 
code-block:: console - esmvaltool data download --config_file + esmvaltool data download --config_dir -The options ``--start`` and ``--end`` can be added to the command above to +The options ``--start`` and ``--end`` can be added to the command above to restrict the download of raw data to a time range. They will be ignored if a specific dataset does not support it (i.e. because it is provided as a single file). Valid formats are ``YYYY``, ``YYYYMM`` and ``YYYYMMDD``. By default, already downloaded data are not overwritten @@ -128,7 +128,7 @@ unless the option ``--overwrite=True`` is used. 4. Create a cmorizer for the dataset ==================================== -There are many cmorizing scripts available in +There are many cmorizing scripts available in `/esmvaltool/cmorizers/data/formatters/datasets/ `_ where solutions to many kinds of format issues with observational data are @@ -158,7 +158,7 @@ configuration file: `MTE.yml `_ in the directory ``ESMValTool/esmvaltool/cmorizers/data/cmor_config/``. Note that both the name of this configuration file and the cmorizing script have to be -identical to the name of your dataset. +identical to the name of your dataset. It is recommended that you set ``project`` to ``OBS6`` in the configuration file. That way, the variables defined in the CMIP6 CMOR table, augmented with the custom variables described above, are available to your script. @@ -188,7 +188,8 @@ The main body of the CMORizer script must contain a function called with this exact call signature. Here, ``in_dir`` corresponds to the input directory of the raw files, ``out_dir`` to the output directory of final reformatted data set, ``cfg`` to the dataset-specific configuration file, -``cfg_user`` to the user configuration file, ``start_date`` to the start +``cfg_user`` to the configuration object (which behaves basically like a +dictionary), ``start_date`` to the start of the period to format, and ``end_date`` to the end of the period to format. 
If not needed, the last three arguments can be ignored using underscores. The return value of this function is ignored. All @@ -256,9 +257,9 @@ The cmorizing script for the given dataset can be run with: .. code-block:: console - esmvaltool data format --config_file + esmvaltool data format --config_dir -The options ``--start`` and ``--end`` can be added to the command above to +The options ``--start`` and ``--end`` can be added to the command above to restrict the formatting of raw data to a time range. They will be ignored if a specific dataset does not support it (i.e. because it is provided as a single file). Valid formats are ``YYYY``, ``YYYYMM`` and ``YYYYMMDD``. @@ -267,12 +268,12 @@ does not support it (i.e. because it is provided as a single file). Valid format The output path given in the configuration file is the path where your cmorized dataset will be stored. The ESMValTool will create a folder - with the correct tier information + with the correct tier information (see Section `2. Edit your configuration file`_) if that tier folder is not - already available, and then a folder named after the dataset. + already available, and then a folder named after the dataset. In this folder the cmorized data set will be stored as a NetCDF file. The cmorized dataset will be automatically moved to the correct tier - subfolder of your OBS or OBS6 directory if the option + subfolder of your OBS or OBS6 directory if the option ``--install=True`` is used in the command above and no such directory was already created. @@ -284,9 +285,9 @@ the cmorizing scripts can be run in a single command with: .. code-block:: console - esmvaltool data prepare --config_file + esmvaltool data prepare --config_dir -Note that options from the ```esmvaltool data download`` and +Note that options from the ```esmvaltool data download`` and ``esmvaltool data format`` commands can be passed to the above command. 6. 
Naming convention of the observational data files diff --git a/doc/sphinx/source/faq.rst b/doc/sphinx/source/faq.rst index 10c72bd2cb..43251a801b 100644 --- a/doc/sphinx/source/faq.rst +++ b/doc/sphinx/source/faq.rst @@ -59,12 +59,17 @@ This is a useful functionality because it allows the user to `fix` things on-the quitting the Ipython console, code execution continues as per normal. -Use multiple config-user.yml files -================================== +Using multiple configuration directories +======================================== + +By default, ESMValTool will read YAML configuration files from the user +configuration directory ``~/.config/esmvaltool``, which can be changed with the +``ESMVALTOOL_CONFIG_DIR`` environment variable. +If required, users can specify the command line option ``--config_dir`` to +select another configuration directory, which is read **in addition** to the +user configuration directory. +See the section on configuration :ref:`config_yaml_files` for details on this. -The user selects the configuration yaml file at run time. It's possible to -have several configurations files. For instance, it may be practical to have one -config file for debugging runs and another for production runs. Create a symbolic link to the latest output directory ===================================================== diff --git a/doc/sphinx/source/functionalities.rst b/doc/sphinx/source/functionalities.rst index 5b49c118a2..0098d95ded 100644 --- a/doc/sphinx/source/functionalities.rst +++ b/doc/sphinx/source/functionalities.rst @@ -12,9 +12,9 @@ that it can: - execute the workflow; and - output the desired collective data and media. -To facilitate these four steps, the user has control over the tool via -two main input files: the :ref:`user configuration file ` -and the :ref:`recipe `. The configuration file sets +To facilitate these four steps, the user has control over the tool via the +:ref:`configuration ` and the :ref:`recipe +`.
The configuration sets user and site-specific parameters (like input and output paths, desired output graphical formats, logging level, etc.), whereas the recipe file sets data, preprocessing and diagnostic-specific parameters (data @@ -27,7 +27,7 @@ recyclable; the recipe file can be used for a large number of applications, since it may include as many datasets, preprocessors and diagnostics sections as the user deems useful. -Once the user configuration files and the recipe are at hand, the user +Once the configuration files and the recipe are at hand, the user can start the tool. A schematic overview of the ESMValTool workflow is depicted in the figure below. diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index f3562c2507..d743ede59f 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -76,7 +76,7 @@ For example, run to run the default example recipe and automatically download the required data to the directory ``~/climate_data``. -The data only needs to be downloaded once, every following run will re-use +The data only needs to be downloaded once, every following run will reuse previously downloaded data stored in this directory. See :ref:`esmvalcore:config-esgf` for a more in depth explanation and the available configuration options. @@ -117,7 +117,7 @@ OBS and OBS6 data is stored in the `esmeval` Group Workspace (GWS), and to be gr GWS, one must apply at https://accounts.jasmin.ac.uk/services/group_workspaces/esmeval/ ; after permission has been granted, the user is encouraged to use the data locally, and not move it elsewhere, to minimize both data transfers and stale disk usage; to note that Tier 3 data is subject to data protection restrictions; for further inquiries, -the GWS is adminstered by [Valeriu Predoi](mailto:valeriu.predoi@ncas.ac.uk). +the GWS is administered by [Valeriu Predoi](mailto:valeriu.predoi@ncas.ac.uk). 
Using a CMORizer script ----------------------- @@ -193,8 +193,8 @@ To CMORize one or more datasets, run: esmvaltool data format --config_file [CONFIG_FILE] [DATASET_LIST] -The path to the raw data to be CMORized must be specified in the :ref:`user -configuration file` as RAWOBS. +The ``rootpath`` to the raw data to be CMORized must be specified in the +:ref:`configuration ` as ``RAWOBS``. Within this path, the data are expected to be organized in subdirectories corresponding to the data tier: Tier2 for freely-available datasets (other than obs4MIPs and ana4mips) and Tier3 for restricted datasets (i.e., dataset which @@ -492,8 +492,8 @@ A list of all currently supported native datasets is :ref:`provided here A detailed description of how to include new native datasets is given :ref:`here `. -To use this functionality, users need to provide a path in the -:ref:`esmvalcore:user configuration file` for the ``native6`` project data +To use this functionality, users need to provide a ``rootpath`` in the +:ref:`configuration ` for the ``native6`` project data and/or the dedicated project used for the native dataset, e.g., ``ICON``. Then, in the recipe, they can refer to those projects. For example: diff --git a/doc/sphinx/source/quickstart/configuration.rst b/doc/sphinx/source/quickstart/configuration.rst index 34c29aac5c..9cea6413b6 100644 --- a/doc/sphinx/source/quickstart/configuration.rst +++ b/doc/sphinx/source/quickstart/configuration.rst @@ -1,4 +1,4 @@ -.. _config-user: +.. _config: ************* Configuration @@ -7,22 +7,23 @@ Configuration The ``esmvaltool`` command is provided by the ESMValCore package, the documentation on configuring ESMValCore can be found :ref:`here `. -In particular, it is recommended to read the section on the -:ref:`User configuration file ` -and the section on +An overview of all configuration options can be found +:ref:`here `. 
+In particular, it is recommended to read the section on how to :ref:`specify +configuration options ` and the section on :ref:`Finding data `. -To install the default configuration file in the default location, run +To install the default configuration in the default location, run .. code:: bash esmvaltool config get_config_user -Note that this file needs to be customized using the instructions above, so +Note that this needs to be customized using the instructions above, so the ``esmvaltool`` command can find the data on your system, before it can run a recipe. There is a lesson available in the `ESMValTool tutorial `_ -that describes how to personalize the configuration file. It can be found +that describes how to personalize the configuration. It can be found `at this site `_. diff --git a/doc/sphinx/source/quickstart/output.rst b/doc/sphinx/source/quickstart/output.rst index 4a33e8ca42..33836f1c9a 100644 --- a/doc/sphinx/source/quickstart/output.rst +++ b/doc/sphinx/source/quickstart/output.rst @@ -5,8 +5,9 @@ Output ****** ESMValTool automatically generates a new output directory with every run. The -location is determined by the output_dir option in the config-user.yml file, -the recipe name, and the date and time, using the the format: YYYYMMDD_HHMMSS. +location is determined by the :ref:`configuration option +` ``output_dir``, the recipe name, and the date and +time, using the format: YYYYMMDD_HHMMSS. For instance, a typical output location would be: output_directory/recipe_ocean_amoc_20190118_1027/ @@ -33,13 +34,15 @@ The preprocessed datasets will be stored to the preproc/ directory. Each variable in each diagnostic will have its own the `metadata.yml`_ interface files saved in the preproc directory. -If the option ``save_intermediary_cubes`` is set to ``true`` in the -config-user.yml file, then the intermediary cubes will also be saved here. -This option is set to false in the default ``config-user.yml`` file.
+If the :ref:`configuration option ` +``save_intermediary_cubes`` is set to ``true``, then the intermediary cubes +will also be saved here. +This option is set to ``false`` by default. -If the option ``remove_preproc_dir`` is set to ``true`` in the config-user.yml -file, then the preproc directory will be deleted after the run completes. This -option is set to true in the default ``config-user.yml`` file. +If the :ref:`configuration option ` +``remove_preproc_dir`` is set to ``true``, then the preproc directory will be +deleted after the run completes. +This option is set to ``true`` by default. Run @@ -70,8 +73,8 @@ Plots ===== The plots directory is where diagnostics save their output figures. These -plots are saved in the format requested by the option `output_file_type` in the -config-user.yml file. +plots are saved in the format requested by the :ref:`configuration option +` ``output_file_type``. Settings.yml @@ -81,10 +84,10 @@ The settings.yml file is automatically generated by ESMValCore. For each diagnos a unique settings.yml file will be produced. The settings.yml file passes several global level keys to diagnostic scripts. -This includes several flags from the config-user.yml file (such as -'write_netcdf', 'write_plots', etc...), several paths which are specific to the -diagnostic being run (such as 'plot_dir' and 'run_dir') and the location on -disk of the metadata.yml file (described below). +This includes several flags from the configuration (such as +``write_netcdf``, ``write_plots``, etc...), several paths which are specific to +the diagnostic being run (such as ``plot_dir`` and ``run_dir``) and the +location on disk of the metadata.yml file (described below). .. code-block:: yaml @@ -147,5 +150,5 @@ As you can see, this is effectively a dictionary with several items including data paths, metadata and other information. There are several tools available in python which are built to read and parse -these files.
The tools are avaialbe in the shared directory in the diagnostics +these files. The tools are available in the shared directory in the diagnostics directory. diff --git a/doc/sphinx/source/quickstart/running.rst b/doc/sphinx/source/quickstart/running.rst index 7f9cadbaa1..20cb8620b0 100644 --- a/doc/sphinx/source/quickstart/running.rst +++ b/doc/sphinx/source/quickstart/running.rst @@ -39,20 +39,20 @@ from ESGF to the local directory ``~/climate_data``, run The ``--search_esgf=when_missing`` option tells ESMValTool to search for and download the necessary climate data files, if they cannot be found locally. -The data only needs to be downloaded once, every following run will re-use +The data only needs to be downloaded once, every following run will reuse previously downloaded data. If you have all required data available locally, you can run the tool with ``--search_esgf=never`` argument (the default). Note that in that case the required data should be located in the directories -specified in your user configuration file. +specified in the configuration (see :ref:`esmvalcore:config_option_rootpath`). A third option ``--search_esgf=always`` is available. With this option, the tool will first check the ESGF for the needed data, regardless of any local data availability; if the data found on ESGF is newer than the local data (if any) or the user specifies a version of the data that is available only from the ESGF, then that data will be downloaded; otherwise, local data will be used. -Recall that the chapter :ref:`Configuring ESMValTool ` -provides an explanation of how to create your own config-user.yml file. +Recall that the chapter on :ref:`configuring ESMValTool ` +provides an explanation of how to set up the configuration. See :ref:`running esmvaltool ` in the ESMValCore documentation for a more complete introduction to the ``esmvaltool`` command. 
diff --git a/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst b/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst index dc26a745e2..b551bbbdc5 100644 --- a/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst +++ b/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst @@ -73,7 +73,7 @@ The settings needed for loading the observational dataset in all diagnostics are provided in the recipe through `obs_info` within `obs_details` section. * ``obs_data_subdir``: subdirectory of auxiliary_data_dir (set in - config-user file) where observation data are stored {e.g., + configuration) where observation data are stored {e.g., data_ESMValTool_Carvalhais2014}. * ``source_label``: source data label {'Carvalhais2014'}. * ``variant_label``: variant of the observation {'BE'} for best estimate. @@ -112,7 +112,7 @@ Script land_carbon_cycle/diag_global_turnover.py * ``y0``: {``float``, 1.0} Y - coordinate of the upper edge of the figure. * ``wp``: {``float``, 1 / number of models} - width of each map. * ``hp``: {``float``, = wp} - height of each map. - * ``xsp``: {``float``, 0} - spacing betweeen maps in X - direction. + * ``xsp``: {``float``, 0} - spacing between maps in X - direction. * ``ysp``: {``float``, -0.03} - spacing between maps in Y -direction. Negative to reduce the spacing below default. * ``aspect_map``: {``float``, 0.5} - aspect of the maps. @@ -217,10 +217,10 @@ Due to inherent dependence of the diagnostic on uncertainty estimates in observation, the data needed for each diagnostic script are processed at different spatial resolutions (as in Carvalhais et al., 2014), and provided in 11 different resolutions (see Table 1). Note that the uncertainties were -estimated at the resolution of the selected models, and, thus, only the -pre-processed observed data can be used with the recipe. 
-It is not possible to use regridding functionalities of ESMValTool to regrid -the observational data to other spatial resolutions, as the uncertainty +estimated at the resolution of the selected models, and, thus, only the +pre-processed observed data can be used with the recipe. +It is not possible to use regridding functionalities of ESMValTool to regrid +the observational data to other spatial resolutions, as the uncertainty estimates cannot be regridded. Table 1. A summary of the observation datasets at different resolutions. @@ -309,7 +309,7 @@ Example plots Comparison of latitudinal (zonal) variations of pearson correlation between turnover time and climate: turnover time and precipitation, controlled for - temperature (left) and vice-versa (right). Reproduces figures 2c and 2d in + temperature (left) and vice-versa (right). Reproduces figures 2c and 2d in `Carvalhais et al. (2014)`_. .. _fig_carvalhais14nat_2: @@ -320,7 +320,7 @@ Example plots Comparison of observation-based and modelled ecosystem carbon turnover time. Along the diagnonal, tau_ctotal are plotted, above the bias, and below - density plots. The inset text in density plots indicate the correlation. + density plots. The inset text in density plots indicate the correlation. .. _fig_carvalhais14nat_3: @@ -328,11 +328,11 @@ Example plots :align: center :width: 80% - Global distributions of multimodel bias and model agreement. Multimodel bias - is calculated as the ratio of multimodel median turnover time and that from - observation. Stippling indicates the regions where only less than one - quarter of the models fall within the range of observational uncertainties - (`5^{th}` and `95^{th}` percentiles). Reproduces figure 3 in `Carvalhais et + Global distributions of multimodel bias and model agreement. Multimodel bias + is calculated as the ratio of multimodel median turnover time and that from + observation. 
Stippling indicates the regions where only less than one + quarter of the models fall within the range of observational uncertainties + (`5^{th}` and `95^{th}` percentiles). Reproduces figure 3 in `Carvalhais et al. (2014)`_. .. _fig_carvalhais14nat_4: @@ -341,7 +341,7 @@ Example plots :align: center :width: 80% - Comparison of latitudinal (zonal) variations of observation-based and - modelled ecosystem carbon turnover time. The zonal turnover time is - calculated as the ratio of zonal `ctotal` and `gpp`. Reproduces figures 2a + Comparison of latitudinal (zonal) variations of observation-based and + modelled ecosystem carbon turnover time. The zonal turnover time is + calculated as the ratio of zonal `ctotal` and `gpp`. Reproduces figures 2a and 2b in `Carvalhais et al. (2014)`_. diff --git a/doc/sphinx/source/recipes/recipe_climwip.rst b/doc/sphinx/source/recipes/recipe_climwip.rst index 0928ba939f..900698b85a 100644 --- a/doc/sphinx/source/recipes/recipe_climwip.rst +++ b/doc/sphinx/source/recipes/recipe_climwip.rst @@ -43,9 +43,9 @@ Using shapefiles for cutting scientific regions To use shapefiles for selecting SREX or AR6 regions by name it is necessary to download them, e.g., from the sources below and reference the file using the `shapefile` parameter. This can either be a -absolute or a relative path. In the example recipes they are stored in a subfolder `shapefiles` -in the `auxiliary_data_dir` (with is specified in the -`config-user.yml `_). +absolute or a relative path. In the example recipes they are stored in a subfolder `shapefiles` +in the :ref:`configuration option ` +``auxiliary_data_dir``. SREX regions (AR5 reference regions): http://www.ipcc-data.org/guidelines/pages/ar5_regions.html @@ -249,7 +249,7 @@ Brunner et al. (2020) recipe and example independence weighting The recipe uses an additional step between pre-processor and weight calculation to calculate anomalies relative to the global mean (e.g., tas_ANOM = tas_CLIM - global_mean(tas_CLIM)). 
This means we do not use the absolute temperatures of a model as performance criterion but rather the horizontal temperature distribution (see `Brunner et al. 2020 `_ for a discussion). -This recipe also implements a somewhat general independence weighting for CMIP6. In contrast to model performance (which should be case specific) model independence can largely be seen as only dependet on the multi-model ensemble in use but not the target variable or region. This means that the configuration used should be valid for similar subsets of CMIP6 as used in this recipe: +This recipe also implements a somewhat general independence weighting for CMIP6. In contrast to model performance (which should be case specific) model independence can largely be seen as only dependent on the multi-model ensemble in use but not the target variable or region. This means that the configuration used should be valid for similar subsets of CMIP6 as used in this recipe: .. code-block:: yaml diff --git a/doc/sphinx/source/recipes/recipe_gier20bg.rst b/doc/sphinx/source/recipes/recipe_gier20bg.rst index bb11770a24..b8f8fb9b8e 100644 --- a/doc/sphinx/source/recipes/recipe_gier20bg.rst +++ b/doc/sphinx/source/recipes/recipe_gier20bg.rst @@ -53,7 +53,7 @@ User settings in recipe * Optional diag_script_info attributes: * ``styleset``: styleset for color coding panels - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotname``: NCL string formatting how variable should be named in plots defaults to short_name if not assigned. @@ -64,7 +64,7 @@ User settings in recipe amplitude contour plot * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png #. 
Script xco2_analysis/main.ncl: @@ -77,7 +77,7 @@ User settings in recipe accounting for the ensemble member named in "ensemble_refs" * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``ensemble_refs``: list of model-ensemble pairs to denote which ensemble member to use for calculating multi-model mean. required if ensemble_mean = true @@ -97,17 +97,17 @@ User settings in recipe * ``plot_var2_mean``: If True adds mean of seasonal cycle to panel as string. * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotname``: String formatting how variable should be named in plots defaults to short_name if not assigned #. Script xco2_analysis/sat_masks.ncl: * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotname``: String formatting how variable should be named in plots defaults to short_name if not assigned - * ``c3s_plots``: Missing value plots seperated by timeseries of c3s satellites + * ``c3s_plots``: Missing value plots separated by timeseries of c3s satellites #. 
Script xco2_analysis/station_comparison.ncl: @@ -116,7 +116,7 @@ User settings in recipe first, then 2D variable, followed by surface stations * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotnames``: String formatting how variables should be named in plots defaults to short_name if not assigned * ``overwrite_altitudes``: Give other altitude values than the ones attached in diff --git a/doc/sphinx/source/recipes/recipe_hydrology.rst b/doc/sphinx/source/recipes/recipe_hydrology.rst index d0e2e0bcb3..995a70b3ae 100644 --- a/doc/sphinx/source/recipes/recipe_hydrology.rst +++ b/doc/sphinx/source/recipes/recipe_hydrology.rst @@ -62,13 +62,13 @@ Diagnostics are stored in esmvaltool/diag_scripts/hydrology * wflow.py * lisflood.py * hype.py - * globwat.py + * globwat.py User settings in recipe ----------------------- -All hydrological recipes require a shapefile as an input to produce forcing data. This shapefile determines the shape of the basin for which the data will be cut out and processed. All recipes are tested with `the shapefiles `_ that are used for the eWaterCycle project. In principle any shapefile can be used, for example, the freely available basin shapefiles from the `HydroSHEDS project `_. +All hydrological recipes require a shapefile as an input to produce forcing data. This shapefile determines the shape of the basin for which the data will be cut out and processed. All recipes are tested with `the shapefiles `_ that are used for the eWaterCycle project. In principle any shapefile can be used, for example, the freely available basin shapefiles from the `HydroSHEDS project `_. #. 
recipe_pcrglobwb.yml @@ -87,7 +87,7 @@ All hydrological recipes require a shapefile as an input to produce forcing data *extract_shape:* - * shapefile: Meuse.shp (MARRMoT is a hydrological Lumped model that needs catchment-aggregated forcing data. The catchment is provided as a shapefile, the path can be relative to ``auxiliary_data_dir`` as defined in config-user.yml.). + * shapefile: Meuse.shp (MARRMoT is a hydrological Lumped model that needs catchment-aggregated forcing data. The catchment is provided as a shapefile, the path can be relative to :ref:`configuration option ` ``auxiliary_data_dir``). * method: contains * crop: true @@ -107,7 +107,7 @@ All hydrological recipes require a shapefile as an input to produce forcing data * dem_file: netcdf file containing a digital elevation model with elevation in meters and coordinates latitude and longitude. A wflow example dataset is available at: https://github.com/openstreams/wflow/tree/master/examples/wflow_rhine_sbm - The example dem_file can be obtained from https://github.com/openstreams/wflow/blob/master/examples/wflow_rhine_sbm/staticmaps/wflow_dem.map + The example dem_file can be obtained from https://github.com/openstreams/wflow/blob/master/examples/wflow_rhine_sbm/staticmaps/wflow_dem.map * regrid: the regridding scheme for regridding to the digital elevation model. Choose ``area_weighted`` (slow) or ``linear``. #. recipe_lisflood.yml diff --git a/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst b/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst index 42bedcec09..718c345b19 100644 --- a/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst +++ b/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst @@ -6,7 +6,7 @@ IPCC AR6 Chapter 3 (selected figures) Overview -------- -This recipe collects selected diagnostics used in IPCC AR6 WGI Chapter 3: +This recipe collects selected diagnostics used in IPCC AR6 WGI Chapter 3: Human influence on the climate system (`Eyring et al., 2021`_). 
Plots from IPCC AR6 can be readily reproduced and compared to previous versions. The aim is to be able to start with what was available now the next time allowing us to focus @@ -15,7 +15,8 @@ on developing more innovative analysis methods rather than constantly having to Processing of CMIP3 models currently works only in serial mode, due to an issue in the input data still under investigation. To run the recipe for Fig 3.42a -and Fig. 3.43 set "max_parallel_tasks: 1" in the config-user.yml file. +and Fig. 3.43 set the :ref:`configuration option ` +``max_parallel_tasks: 1``. The plots are produced collecting the diagnostics from individual recipes. The following figures from `Eyring et al. (2021)`_ can currently be reproduced: @@ -43,10 +44,9 @@ To reproduce Fig. 3.9 you need the shapefile of the `AR6 reference regions (`Iturbide et al., 2020 `_). Please download the file `IPCC-WGI-reference-regions-v4_shapefile.zip `_, -unzip and store it in `/IPCC-regions/` (the `auxiliary_data_dir` -is defined in the `config-user.yml -`_ -file). +unzip and store it in `/IPCC-regions/` (where +``auxiliary_data_dir`` is given as :ref:`configuration option +`). .. _`Eyring et al., 2021`: https://www.ipcc.ch/report/ar6/wg1/chapter/chapter-3/ .. _`Eyring et al. 
(2021)`: https://www.ipcc.ch/report/ar6/wg1/chapter/chapter-3/ @@ -179,7 +179,7 @@ User settings in recipe * start_year: start year in figure * end_year: end year in figure - * panels: list of variable blocks for each panel + * panels: list of variable blocks for each panel *Optional settings for script* @@ -205,7 +205,7 @@ User settings in recipe * plot_units: variable unit for plotting * y-min: set min of y-axis * y-max: set max of y-axis - * order: order in which experiments should be plotted + * order: order in which experiments should be plotted * stat_shading: if true: shading of statistic range * ref_shading: if true: shading of reference period @@ -225,7 +225,7 @@ User settings in recipe * plot_legend: if true, plot legend will be plotted * plot_units: variable unit for plotting - * multi_model_mean: if true, multi-model mean and uncertaintiy will be + * multi_model_mean: if true, multi-model mean and uncertainty will be plotted *Optional settings for variables* @@ -304,7 +304,7 @@ User settings in recipe * labels: List of labels for each variable on the x-axis * model_spread: if True, model spread is shaded * plot_median: if True, median is plotted - * project_order: give order of projects + * project_order: give order of projects Variables @@ -452,7 +452,7 @@ Example plots 2013). For line colours see the legend of Figure 3.4. Additionally, the multi-model mean (red) and standard deviation (grey shading) are shown. Observational and model datasets were detrended by removing the - least-squares quadratic trend. + least-squares quadratic trend. .. figure:: /recipes/figures/ipccwg1ar6ch3/tas_anom_damip_global_1850-2020.png :align: center @@ -467,7 +467,7 @@ Example plots anomalies are shown relative to 1950-2010 for Antarctica and relative to 1850-1900 for other continents. CMIP6 historical simulations are expanded by the SSP2-4.5 scenario simulations. All available ensemble members were used. - Regions are defined by Iturbide et al. (2020). 
+ Regions are defined by Iturbide et al. (2020). .. figure:: /recipes/figures/ipccwg1ar6ch3/model_bias_pr_annualclim_CMIP6.png :align: center @@ -487,7 +487,7 @@ Example plots show a change greater than the variability threshold; crossed lines indicate regions with conflicting signal, where >=66% of models show change greater than the variability threshold and <80% of all models agree on the sign of - change. + change. .. figure:: /recipes/figures/ipccwg1ar6ch3/precip_anom_1950-2014.png :align: center @@ -511,7 +511,7 @@ Example plots forcings (brown) and natural forcings only (blue). Observed trends for each observational product are shown as horizontal lines. Panel (b) shows annual mean precipitation rate (mm day-1) of GHCN version 2 for the years 1950-2014 - over land areas used to compute the plots. + over land areas used to compute the plots. .. figure:: /recipes/figures/ipccwg1ar6ch3/zonal_westerly_winds.png :align: center diff --git a/doc/sphinx/source/recipes/recipe_kcs.rst b/doc/sphinx/source/recipes/recipe_kcs.rst index fa07f0a167..1ed117ecb6 100644 --- a/doc/sphinx/source/recipes/recipe_kcs.rst +++ b/doc/sphinx/source/recipes/recipe_kcs.rst @@ -30,7 +30,7 @@ In the second diagnostic, for both the control and future periods, the N target 2. Further constrain the selection by picking samples that represent either high or low changes in summer precipitation and summer and winter temperature, by limiting the remaining samples to certain percentile ranges: relatively wet/cold in the control and dry/warm in the future, or vice versa. The percentile ranges are listed in table 1 of Lenderink 2014's supplement. This should result is approximately 50 remaining samples for each scenario, for both control and future. 3. Use a Monte-Carlo method to make a final selection of 8 resamples with minimal reuse of the same ensemble member/segment. -Datasets have been split in two parts: the CMIP datasets and the target model datasets. 
An example use case for this recipe is to compare between CMIP5 and CMIP6, for example. The recipe can work with a target model that is not part of CMIP, provided that the data are CMOR compatible, and using the same data referece syntax as the CMIP data. Note that you can specify :ref:`multiple data paths` in the user configuration file. +Datasets have been split in two parts: the CMIP datasets and the target model datasets. An example use case for this recipe is to compare between CMIP5 and CMIP6, for example. The recipe can work with a target model that is not part of CMIP, provided that the data are CMOR compatible, and using the same data reference syntax as the CMIP data. Note that you can specify :ref:`multiple data paths` in the configuration. Available recipes and diagnostics @@ -128,7 +128,7 @@ AND highlighting the selected steering parameters and resampling periods: .. figure:: /recipes/figures/kcs/global_matching.png :align: center -The diagnostic ``local_resampling`` procudes a number of output files: +The diagnostic ``local_resampling`` produces a number of output files: * ``season_means_.nc``: intermediate results, containing the season means for each segment of the original target model ensemble. * ``top1000_.csv``: intermediate results, containing the 1000 combinations that have been selected based on winter mean precipitation. diff --git a/doc/sphinx/source/recipes/recipe_model_evaluation.rst b/doc/sphinx/source/recipes/recipe_model_evaluation.rst index 9e199815e0..c61f34aa62 100644 --- a/doc/sphinx/source/recipes/recipe_model_evaluation.rst +++ b/doc/sphinx/source/recipes/recipe_model_evaluation.rst @@ -35,9 +35,9 @@ User settings ------------- It is recommended to use a vector graphic file type (e.g., SVG) for the output -format when running this recipe, i.e., run the recipe with the command line -option ``--output_file_type=svg`` or use ``output_file_type: svg`` in your -:ref:`esmvalcore:user configuration file`. 
+format when running this recipe, i.e., run the recipe with the +:ref:`configuration options ` ``output_file_type: +svg``. Note that map and profile plots are rasterized by default. Use ``rasterize: false`` in the recipe to disable this. diff --git a/doc/sphinx/source/recipes/recipe_monitor.rst b/doc/sphinx/source/recipes/recipe_monitor.rst index ee3b9b44fa..8f4893fc12 100644 --- a/doc/sphinx/source/recipes/recipe_monitor.rst +++ b/doc/sphinx/source/recipes/recipe_monitor.rst @@ -36,9 +36,9 @@ User settings ------------- It is recommended to use a vector graphic file type (e.g., SVG) for the output -files when running this recipe, i.e., run the recipe with the command line -option ``--output_file_type=svg`` or use ``output_file_type: svg`` in your -:ref:`esmvalcore:user configuration file`. +format when running this recipe, i.e., run the recipe with the +:ref:`configuration options ` ``output_file_type: +svg``. Note that map and profile plots are rasterized by default. Use ``rasterize_maps: false`` or ``rasterize: false`` (see `Recipe settings`_) in the recipe to disable this. diff --git a/doc/sphinx/source/recipes/recipe_oceans.rst b/doc/sphinx/source/recipes/recipe_oceans.rst index d8bf3143e1..17552b39fa 100644 --- a/doc/sphinx/source/recipes/recipe_oceans.rst +++ b/doc/sphinx/source/recipes/recipe_oceans.rst @@ -458,7 +458,7 @@ and a latitude and longitude coordinates. This diagnostic also includes the optional arguments, `maps_range` and `diff_range` to manually define plot ranges. Both arguments are a list of two floats -to set plot range minimun and maximum values respectively for Model and Observations +to set plot range minimum and maximum values respectively for Model and Observations maps (Top panels) and for the Model minus Observations panel (bottom left). Note that if input data have negative values the Model over Observations map (bottom right) is not produced. 
@@ -491,14 +491,14 @@ diagnostic_maps_multimodel.py The diagnostic_maps_multimodel.py_ diagnostic makes model(s) vs observations maps and if data are not provided it draws only model field. -It is always nessary to define the overall layout trough the argument `layout_rowcol`, +It is always necessary to define the overall layout through the argument `layout_rowcol`, which is a list of two integers indicating respectively the number of rows and columns to organize the plot. Observations has not be accounted in here as they are automatically added at the top of the figure. This diagnostic also includes the optional arguments, `maps_range` and `diff_range` to manually define plot ranges. Both arguments are a list of two floats -to set plot range minimun and maximum values respectively for variable data and +to set plot range minimum and maximum values respectively for variable data and the Model minus Observations range. Note that this diagnostic assumes that the preprocessors do the bulk of the @@ -748,7 +748,7 @@ These tools are: - bgc_units: converts to sensible units where appropriate (ie Celsius, mmol/m3) - timecoord_to_float: Converts time series to decimal time ie: Midnight on January 1st 1970 is 1970.0 - add_legend_outside_right: a plotting tool, which adds a legend outside the axes. -- get_image_format: loads the image format, as defined in the global user config.yml. +- get_image_format: loads the image format, as defined in the global configuration. - get_image_path: creates a path for an image output. - make_cube_layer_dict: makes a dictionary for several layers of a cube. @@ -762,8 +762,8 @@ A note on the auxiliary data directory Some of these diagnostic scripts may not function on machines with no access to the internet, as cartopy may try to download the shape files. 
The solution to this issue is the put the relevant cartopy shapefiles in a directory which -is visible to esmvaltool, then link that path to ESMValTool via -the `auxiliary_data_dir` variable in your config-user.yml file. +is visible to esmvaltool, then link that path to ESMValTool via the +:ref:`configuration option ` ``auxiliary_data_dir``. The cartopy masking files can be downloaded from: https://www.naturalearthdata.com/downloads/ diff --git a/doc/sphinx/source/recipes/recipe_rainfarm.rst b/doc/sphinx/source/recipes/recipe_rainfarm.rst index d6c06c6f7a..aeb7cd0638 100644 --- a/doc/sphinx/source/recipes/recipe_rainfarm.rst +++ b/doc/sphinx/source/recipes/recipe_rainfarm.rst @@ -32,7 +32,7 @@ User settings * nf: number of subdivisions for downscaling (e.g. 8 will produce output fields with linear resolution increased by a factor 8) * conserv_glob: logical, if to conserve precipitation over full domain * conserv_smooth: logical, if to conserve precipitation using convolution (if neither conserv_glob or conserv_smooth is chosen, box conservation is used) -* weights_climo: set to false or omit if no orographic weights are to be used, else set it to the path to a fine-scale precipitation climatology file. If a relative file path is used, `auxiliary_data_dir` will be searched for this file. The file is expected to be in NetCDF format and should contain at least one precipitation field. If several fields at different times are provided, a climatology is derived by time averaging. Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (http://www.worldclim.org) or CHELSA (http://chelsa-climate.org) websites. 
The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools (https://www.gdal.org). +* weights_climo: set to false or omit if no orographic weights are to be used, else set it to the path to a fine-scale precipitation climatology file. If a relative file path is used, ``auxiliary_data_dir`` will be searched for this file. The file is expected to be in NetCDF format and should contain at least one precipitation field. If several fields at different times are provided, a climatology is derived by time averaging. Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (http://www.worldclim.org) or CHELSA (http://chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools (https://www.gdal.org). Variables @@ -60,4 +60,4 @@ Example plots .. figure:: /recipes/figures/rainfarm/rainfarm.png :width: 14cm - Example of daily cumulated precipitation from the CMIP5 EC-EARTH model on a specific day, downscaled using RainFARM from its original resolution (1.125°) (left panel), increasing spatial resolution by a factor of 8 to 0.14°; Two stochastic realizations are shown (central and right panel). A fixed spectral slope of s=1.7 was used. Notice how the downscaled fields introduce fine scale precipitation structures, while still maintaining on average the original coarse-resolution precipitation. Different stochastic realizations are shown to demonstrate how an ensemble of realizations can be used to reproduce unresolved subgrid variability. (N.B.: this plot was not produced by ESMValTool - the recipe output is netcdf only). 
+ Example of daily cumulated precipitation from the CMIP5 EC-EARTH model on a specific day, downscaled using RainFARM from its original resolution (1.125°) (left panel), increasing spatial resolution by a factor of 8 to 0.14°; Two stochastic realizations are shown (central and right panel). A fixed spectral slope of s=1.7 was used. Notice how the downscaled fields introduce fine scale precipitation structures, while still maintaining on average the original coarse-resolution precipitation. Different stochastic realizations are shown to demonstrate how an ensemble of realizations can be used to reproduce unresolved subgrid variability. (N.B.: this plot was not produced by ESMValTool - the recipe output is netcdf only). diff --git a/doc/sphinx/source/recipes/recipe_shapeselect.rst b/doc/sphinx/source/recipes/recipe_shapeselect.rst index 63afbcae6c..12da974c28 100644 --- a/doc/sphinx/source/recipes/recipe_shapeselect.rst +++ b/doc/sphinx/source/recipes/recipe_shapeselect.rst @@ -29,7 +29,7 @@ User settings in recipe *Required settings (scripts)* - * shapefile: path to the user provided shapefile. A relative path is relative to the auxiliary_data_dir as configured in config-user.yml. + * shapefile: path to the user provided shapefile. A relative path is relative to the :ref:`configuration option ` ``auxiliary_data_dir``. * weighting_method: the preferred weighting method 'mean_inside' - mean of all grid points inside polygon; 'representative' - one point inside or close to the polygon is used to represent the complete area. diff --git a/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst b/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst index 3c7fa86a3a..4faa05c2a9 100644 --- a/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst +++ b/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst @@ -28,8 +28,8 @@ User settings .. note:: - Make sure to run this recipe setting ``max_parallel_tasks: 1`` in the ``config_user.yml`` - file or using the CLI flag ``--max_parallel_tasks=1``. 
+ Make sure to run this recipe with the :ref:`configuration option + ` ``max_parallel_tasks: 1``. User setting files (cfg files) are stored in nml/cfg_carbon/ diff --git a/doc/sphinx/source/recipes/recipe_wenzel16nat.rst b/doc/sphinx/source/recipes/recipe_wenzel16nat.rst index 03bb822545..a661844e70 100644 --- a/doc/sphinx/source/recipes/recipe_wenzel16nat.rst +++ b/doc/sphinx/source/recipes/recipe_wenzel16nat.rst @@ -35,9 +35,8 @@ User settings .. note:: - Make sure to run this recipe setting ``output_file_type: pdf`` in the ``config_user.yml`` - file or using the CLI flag ``--output_file_type=pdf``. - + Make sure to run this recipe with the :ref:`configuration option + ` ``output_file_type: pdf``. #. Script carbon_beta.ncl @@ -58,7 +57,7 @@ User settings none -#. Script carbon_co2_cycle.ncl +#. Script carbon_co2_cycle.ncl *Required Settings (scripts)* @@ -72,7 +71,7 @@ User settings *Required settings (variables)* - * reference_dataset: name of reference datatset (observations) + * reference_dataset: name of reference dataset (observations) *Optional settings (variables)* @@ -102,15 +101,15 @@ Example plots ------------- .. figure:: /recipes/figures/wenzel16nat/fig_1.png - :width: 12 cm + :width: 12 cm :align: center - + Comparison of CO\ :sub:`2` seasonal amplitudes for CMIP5 historical simulations and observations showing annual mean atmospheric CO\ :sub:`2` versus the amplitudes of the CO\ :sub:`2` seasonal cycle at Pt. Barrow, Alaska (produced with carbon_co2_cycle.ncl, similar to Fig. 1a from Wenzel et al. (2016)). - + .. figure:: /recipes/figures/wenzel16nat/fig_2.png - :width: 12 cm + :width: 12 cm :align: center - + Barchart showing the gradient of the linear correlations for the comparison of CO\ :sub:`2` seasonal amplitudes for CMIP5 historical for at Pt. Barrow, Alaska (produced with carbon_co2_cycle.ncl, similar to Fig. 1b from Wenzel et al. (2016)). ..
figure:: /recipes/figures/wenzel16nat/fig_3.png diff --git a/doc/sphinx/source/utils.rst b/doc/sphinx/source/utils.rst index 49c3df7aef..536b78ebee 100644 --- a/doc/sphinx/source/utils.rst +++ b/doc/sphinx/source/utils.rst @@ -135,10 +135,11 @@ This suite is configured to work with versions of cylc older than 8.0.0 . To prepare for using this tool: #. Log in to a system that uses `slurm `_ -#. Make sure the required CMIP and observational datasets are available and configured in config-user.yml +#. Make sure the required CMIP and observational datasets are available and + their ``rootpath`` and ``drs`` are properly set up in the :ref:`configuration + ` #. Make sure the required auxiliary data is available (see :ref:`recipe documentation `) #. Install ESMValTool -#. Update config-user.yml so it points to the right data locations Next, get started with `cylc `_: @@ -181,7 +182,7 @@ The following parameters have to be set in the script in order to make it run: Optionally, the following parameters can be edited: -* ``config_file``, *str*: Path to ``config-user.yml`` if default ``~/.esmvaltool/config-user.yml`` not used. +* ``config_dir``, *str*: Path to :ref:`configuration directory `, by default ``~/.config/esmvaltool/``. * ``partition``, *str*: Name of the DKRZ partition used to run jobs. Default is ``interactive`` to minimize computing cost compared to ``compute`` for which nodes cannot be shared. * ``memory``, *str*: Amount of memory requested for each run. Default is ``64G`` to allow to run 4 recipes on the same node in parallel. * ``time``, *str*: Time limit. Default is ``04:00:00`` to increase the job priority. Jobs can run for up to 8 hours and 12 hours on the compute and interactive partitions, respectively. @@ -230,7 +231,7 @@ script as well as a list of all available recipes. 
To generate the list, run the for recipe in $(esmvaltool recipes list | grep '\.yml$'); do echo $(basename "$recipe"); done > all_recipes.txt -To keep the script execution fast, it is recommended to use ``log_level: info`` in your config-user.yml file so that SLURM +To keep the script execution fast, it is recommended to use ``log_level: info`` in the configuration so that SLURM output files are rather small. .. _overview_page: diff --git a/esmvaltool/cmorizers/data/cmorizer.py b/esmvaltool/cmorizers/data/cmorizer.py index 16b7666350..5e66b7a70f 100755 --- a/esmvaltool/cmorizers/data/cmorizer.py +++ b/esmvaltool/cmorizers/data/cmorizer.py @@ -10,6 +10,7 @@ import os import shutil import subprocess +import warnings from pathlib import Path import esmvalcore @@ -18,13 +19,14 @@ from esmvalcore.config import CFG from esmvalcore.config._logging import configure_logging +from esmvaltool import ESMValToolDeprecationWarning from esmvaltool.cmorizers.data.utilities import read_cmor_config logger = logging.getLogger(__name__) datasets_file = os.path.join(os.path.dirname(__file__), 'datasets.yml') -class Formatter(): +class _Formatter(): """ Class to manage the download and formatting of datasets. @@ -39,26 +41,40 @@ def __init__(self, info): self.datasets_info = info self.config = '' - def start(self, command, datasets, config_file, options): + def start(self, command, datasets, config_file, config_dir, options): """Read configuration and set up formatter for data processing. Parameters ---------- command: str - Name of the command to execute + Name of the command to execute. datasets: str - List of datasets to process, comma separated + List of datasets to process, comma separated. config_file: str - Config file to use + Config file to use. Option will be removed in v2.14.0. + config_dir: str + Config directory to use. options: dict() - Extra options to overwrite config user file + Extra options to overwrite configuration. 
+ """ if isinstance(datasets, str): self.datasets = datasets.split(',') else: self.datasets = datasets - CFG.load_from_file(config_file) + if config_file is not None: # remove in v2.14.0 + CFG.load_from_file(config_file) + elif config_dir is not None: + config_dir = Path( + os.path.expandvars(config_dir) + ).expanduser().absolute() + if not config_dir.is_dir(): + raise NotADirectoryError( + f"Invalid --config_dir given: {config_dir} is not an " + f"existing directory" + ) + CFG.update_from_dirs([config_dir]) CFG.update(options) self.config = CFG.start_session(f'data_{command}') @@ -199,8 +215,9 @@ def format(self, start, end, install): failed_datasets.append(dataset) if failed_datasets: - raise Exception( - f'Format failed for datasets {" ".join(failed_datasets)}') + raise RuntimeError( + f'Format failed for datasets {" ".join(failed_datasets)}' + ) @staticmethod def has_downloader(dataset): @@ -400,7 +417,7 @@ class DataCommand(): def __init__(self): with open(datasets_file, 'r', encoding='utf8') as data: self._info = yaml.safe_load(data) - self.formatter = Formatter(self._info) + self.formatter = _Formatter(self._info) def _has_downloader(self, dataset): return 'Yes' if self.formatter.has_downloader(dataset) else "No" @@ -441,28 +458,48 @@ def download(self, start=None, end=None, overwrite=False, + config_dir=None, **kwargs): """Download datasets. Parameters ---------- - datasets : list(str) + datasets: list(str) List of datasets to format - config_file : str, optional - Path to ESMValTool's config user file, by default None - start : str, optional + config_file: str, optional + Path to ESMValTool's config user file, by default None. + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. + start: str, optional Start of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. 
- end : str, optional + end: str, optional End of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. - overwrite : bool, optional + overwrite: bool, optional If true, download already present data again + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." + ) + warnings.warn(msg, ESMValToolDeprecationWarning) + start = self._parse_date(start) end = self._parse_date(end) - self.formatter.start('download', datasets, config_file, kwargs) + self.formatter.start( + 'download', datasets, config_file, config_dir, kwargs + ) self.formatter.download(start, end, overwrite) def format(self, @@ -471,6 +508,7 @@ def format(self, start=None, end=None, install=False, + config_dir=None, **kwargs): """Format datasets. @@ -480,6 +518,11 @@ def format(self, List of datasets to format config_file : str, optional Path to ESMValTool's config user file, by default None + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. start : str, optional Start of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. @@ -488,11 +531,25 @@ def format(self, are YYYY, YYYYMM and YYYYMMDD. install : bool, optional If true, move processed data to the folder, by default False + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. 
Please use the option ``config_dir`` instead." + ) + warnings.warn(msg, ESMValToolDeprecationWarning) + start = self._parse_date(start) end = self._parse_date(end) - self.formatter.start('formatting', datasets, config_file, kwargs) + self.formatter.start( + 'formatting', datasets, config_file, config_dir, kwargs + ) self.formatter.format(start, end, install) def prepare(self, @@ -502,6 +559,7 @@ def prepare(self, end=None, overwrite=False, install=False, + config_dir=None, **kwargs): """Download and format a set of datasets. @@ -511,6 +569,11 @@ def prepare(self, List of datasets to format config_file : str, optional Path to ESMValTool's config user file, by default None + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. start : str, optional Start of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. @@ -521,11 +584,25 @@ def prepare(self, If true, move processed data to the folder, by default False overwrite : bool, optional If true, download already present data again + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." 
+ ) + warnings.warn(msg, ESMValToolDeprecationWarning) + start = self._parse_date(start) end = self._parse_date(end) - self.formatter.start('preparation', datasets, config_file, kwargs) + self.formatter.start( + 'preparation', datasets, config_file, config_dir, kwargs + ) if self.formatter.download(start, end, overwrite): self.formatter.format(start, end, install) else: diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml index 508b18ccec..cda27910bd 100644 --- a/esmvaltool/cmorizers/data/datasets.yml +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -17,16 +17,16 @@ datasets: analyses covering analysis of monthly rainfall. The dataset provides consistent temporal and spatial analyses across Australia for each observed data variable. This accounts for spatial and temporal gaps in observations. Where possible, the gridded analysis techniques provide useful estimates in data-sparse regions - such as central Australia. - + such as central Australia. + Time coverage: Site-based data are used to provide gridded climate data at the monthly timescale for rainfall (1900+). Reference: Evans, A., Jones, D.A., Smalley, R., and Lellyett, S. 2020. An enhanced gridded rainfall analysis scheme for Australia. Bureau of Meteorology Research Report. No. 41. National Computational Infrastructure (NCI) - Catalogue Record: http://dx.doi.org/10.25914/6009600786063. - Data from NCI (National Computing Infrastructure Australia https://nci.org.au/), + Data from NCI (National Computing Infrastructure Australia https://nci.org.au/), requires an NCI account and access to Gadi(Supercomputer in Canberra) and the project found in catalogue record. Access can be requested through NCI. 
NCI is an ESGF node (https://esgf.nci.org.au/projects/esgf-nci/) - + ANUClimate: tier: 3 source: "https://dx.doi.org/10.25914/60a10aa56dd1b" @@ -35,7 +35,7 @@ datasets: Data from NCI project requiring an NCI account and access to GADI ANUClimate 2.0 consists of gridded daily and monthly climate variables across the terrestrial landmass of Australia - from at least 1970 to the present. Rainfall grids are generated from 1900 to the present. The underpinning spatial + from at least 1970 to the present. Rainfall grids are generated from 1900 to the present. The underpinning spatial models have been developed at the Fenner School of Environment and Society of the Australian National University. APHRO-MA: @@ -301,7 +301,7 @@ datasets: last_access: 2020-03-23 info: | Create a new empty directory ``$RAWOBSPATH/Tier2/CT2019`` (where - ``$RAWOBSPATH`` is given by your user configuration file) where the raw + ``$RAWOBSPATH`` is given by your configuration) where the raw data will be stored. The download of the data is automatically handled by this script. If data is already present in this directory, the download is skipped (to force a new download delete your old files). 
@@ -479,11 +479,11 @@ datasets: Download and processing instructions: Use the following CLI to download all the files: esmvaltool data download ESACCI-LANDCOVER - The underlying downloader is located here: + The underlying downloader is located here: /ESMValTool/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py - and it will download all the files currently available on CEDA (1992-2020) + and it will download all the files currently available on CEDA (1992-2020) under a single directory as follow: ${RAWOBS}/Tier2/ESACCI-LANDCOVER - + ESACCI-LST: tier: 2 source: On CEDA-JASMIN, /gws/nopw/j04/esacci_lst/public @@ -554,7 +554,7 @@ datasets: source: https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=COMBI_V001 last_access: 2024-02-21 info: | - CDR2 requires registration at EUMETSAT CM SAF, the information on how to + CDR2 requires registration at EUMETSAT CM SAF, the information on how to download the order will be emailed once the order is ready. All files need to be in one directory, not in yearly subdirectories. @@ -903,11 +903,11 @@ datasets: Select "Data Access" -> "Subset/Get Data" -> "Get Data" and follow the "Instructions for downloading". All *.he5 files need to be saved in the $RAWOBS/Tier3/MLS-AURA directory, where $RAWOBS refers to the RAWOBS - directory defined in the user configuration file. Apply this procedure to - both links provided above. The temperature fields are necessary for quality + directory defined in the configuration. Apply this procedure to both + links provided above. The temperature fields are necessary for quality control of the RHI data (see Data Quality Document for MLS-AURA for more information). - A registration is required + A registration is required. 
MOBO-DIC_MPIM: tier: 2 @@ -1078,7 +1078,7 @@ datasets: last_access: 2023-12-04 info: | Download the following files: - ersst.yyyymm.nc + ersst.yyyymm.nc for years 1854 to 2020 NOAA-ERSSTv5: @@ -1087,7 +1087,7 @@ datasets: last_access: 2023-12-04 info: | Download the following files: - ersst.v5.yyyymm.nc + ersst.v5.yyyymm.nc for years 1854 onwards NOAAGlobalTemp: @@ -1114,13 +1114,13 @@ datasets: Download daily data from: https://nsidc.org/data/NSIDC-0116 Login required for download, and also requires citation only to use - + NSIDC-G02202-sh: tier: 3 source: https://polarwatch.noaa.gov/erddap/griddap/nsidcG02202v4shmday last_access: 2023-05-13 info: | - Download monthly data. + Download monthly data. Login required for download, and also requires citation only to use OceanSODA-ETHZ: diff --git a/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py b/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py index 72cf8d98af..374c750ef6 100644 --- a/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py +++ b/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py @@ -12,8 +12,13 @@ 4. Copy/paste the text in https://api.ecmwf.int/v1/key/ into a blank text file and save it as $HOME/.ecmwfapirc -5. Use ESMValCore/esmvalcore/config-user.yml as an template -and set the rootpath of the output directory in RAWOBS +5. Copy the default configuration file with + +```bash +esmvaltool config get_config_user --path=config-user.yml +``` + +and set the ``rootpath`` for the RAWOBS project. 6. 
Check the description of the variables at https://apps.ecmwf.int/codes/grib/param-db diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py b/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py index a5dc5b851c..7a9e374136 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py @@ -1,14 +1,12 @@ """Script to download JRA-55 from RDA.""" import logging import os - from datetime import datetime from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader - logger = logging.getLogger(__name__) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py index 0ac6a3e012..5a54080be4 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py @@ -1,6 +1,7 @@ """Script to download NOAA-ERSST-v3b.""" import logging from datetime import datetime + from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py index f995f9d2c7..7dbeccfe12 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py @@ -1,6 +1,7 @@ """Script to download NOAA-ERSST-V5.""" import logging from datetime import datetime + from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py index 798decda96..8c3c02c410 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py +++ 
b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py @@ -1,6 +1,7 @@ """Script to download NSIDC-G02202-sh.""" import logging from datetime import datetime + from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py b/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py index 33f56f234d..64f64f4e82 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py @@ -11,7 +11,7 @@ Download and processing instructions Create a new empty directory ``$RAWOBSPATH/Tier2/CT2019`` (where - ``$RAWOBSPATH`` is given by your user configuration file) where the raw + ``$RAWOBSPATH`` is given in the configuration) where the raw data will be stored. The download of the data is automatically handled by this script. If data is already present in this directory, the download is skipped (to force a new download delete your old files). diff --git a/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl index b57bca6a09..d9fbf761df 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl +++ b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl @@ -14,7 +14,7 @@ ; Download and processing instructions ; (requires EarthData login; see https://urs.earthdata.nasa.gov/) ; Use ESMValTool automatic download: -; esmvaltool data download --config_file MERRA +; esmvaltool data download MERRA ; ; Modification history ; 20230818-lauer_axel: added output of clwvi (iwp + lwp) @@ -209,7 +209,7 @@ begin delete(tmp) - ; calcuation of outgoing fluxes: out = in - net + ; calculation of outgoing fluxes: out = in - net if ((VAR(vv) .eq. "rsut") .or. (VAR(vv) .eq. "rsutcs")) then tmp = f->SWTDN if (isatt(tmp, "scale_factor") .or. 
isatt(tmp, "add_offset")) then @@ -220,7 +220,8 @@ begin delete(tmp) end if - ; calcuation of total precipitation flux = large-scale+convective+anvil + ; calculation of total precipitation flux = + ; large-scale+convective+anvil if (VAR(vv) .eq. "pr") then tmp = f->PRECCON ; surface precipitation flux from convection if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py b/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py index 5b500e9087..0a5031b243 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py @@ -14,7 +14,7 @@ Select "Data Access" -> "Subset/Get Data" -> "Get Data" and follow the "Instructions for downloading". All *.he5 files need to be saved in the $RAWOBS/Tier3/MLS-AURA directory, where $RAWOBS refers to the RAWOBS - directory defined in the user configuration file. Apply this procedure to + directory defined in the configuration. Apply this procedure to both links provided above. The temperature fields are necessary for quality control of the RHI data (see Data Quality Document for MLS-AURA for more information). diff --git a/esmvaltool/diag_scripts/kcs/local_resampling.py b/esmvaltool/diag_scripts/kcs/local_resampling.py index 9eb2ea28ed..0bf6260d65 100644 --- a/esmvaltool/diag_scripts/kcs/local_resampling.py +++ b/esmvaltool/diag_scripts/kcs/local_resampling.py @@ -292,7 +292,7 @@ def select_final_subset(cfg, subsets, prov=None): Final set of eight samples should have with minimal reuse of the same ensemble member for the same period. From 10.000 randomly - selected sets of 8 samples, count and penalize re-used segments (1 + selected sets of 8 samples, count and penalize reused segments (1 for 3*reuse, 5 for 4*reuse). Choose the set with the lowest penalty. 
""" n_samples = cfg['n_samples'] @@ -387,7 +387,7 @@ def _get_climatology(cfg, scenario_name, table, prov=None): resampled_control = _recombine(segments_control, table['control']) resampled_future = _recombine(segments_future, table['future']) - # Store the resampled contol climates + # Store the resampled control climates filename = get_diagnostic_filename(f'resampled_control_{scenario_name}', cfg, extension='nc') diff --git a/esmvaltool/diag_scripts/monitor/compute_eofs.py b/esmvaltool/diag_scripts/monitor/compute_eofs.py index dea5d63b9a..a07ca835c0 100644 --- a/esmvaltool/diag_scripts/monitor/compute_eofs.py +++ b/esmvaltool/diag_scripts/monitor/compute_eofs.py @@ -24,10 +24,10 @@ Path to the folder to store figures. Defaults to ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are - replaced with the corresponding tags). ``{plot_dir}`` is replaced with the + replaced with the corresponding tags). ``{plot_dir}`` is replaced with the default ESMValTool plot directory (i.e., ``output_dir/plots/diagnostic_name/script_name/``, see - :ref:`esmvalcore:user configuration file`). + :ref:`esmvalcore:outputdata`). rasterize_maps: bool, optional (default: True) If ``True``, use `rasterization `_ for diff --git a/esmvaltool/diag_scripts/monitor/monitor.py b/esmvaltool/diag_scripts/monitor/monitor.py index 59e37b9842..dda5aa4f3d 100644 --- a/esmvaltool/diag_scripts/monitor/monitor.py +++ b/esmvaltool/diag_scripts/monitor/monitor.py @@ -52,10 +52,10 @@ Path to the folder to store figures. Defaults to ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are - replaced with the corresponding tags). ``{plot_dir}`` is replaced with the + replaced with the corresponding tags). 
``{plot_dir}`` is replaced with the default ESMValTool plot directory (i.e., ``output_dir/plots/diagnostic_name/script_name/``, see - :ref:`esmvalcore:user configuration file`). + :ref:`esmvalcore:outputdata`). rasterize_maps: bool, optional (default: True) If ``True``, use `rasterization `_ for diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py index 879346954c..32f654b3b6 100644 --- a/esmvaltool/diag_scripts/monitor/multi_datasets.py +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -100,10 +100,10 @@ Path to the folder to store figures. Defaults to ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are - replaced with the corresponding tags). ``{plot_dir}`` is replaced with the + replaced with the corresponding tags). ``{plot_dir}`` is replaced with the default ESMValTool plot directory (i.e., ``output_dir/plots/diagnostic_name/script_name/``, see - :ref:`esmvalcore:user configuration file`). + :ref:`esmvalcore:outputdata`). savefig_kwargs: dict, optional Optional keyword arguments for :func:`matplotlib.pyplot.savefig`. By default, uses ``bbox_inches: tight, dpi: 300, orientation: landscape``. 
diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl index bd672ed3cf..0f1b49c224 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl @@ -151,10 +151,8 @@ begin fx_variable = "volcello" error_msg("f", "russell18jgr-fig6.ncl", " ", "volcello file for " \ + vo_datasets(iii) \ - + " not found in the metadata file, please add "\ - + "'fx_files: [volcello]' to the variable dictionary in the " \ - + "recipe or add the location of file to input directory " \ - + "in config-user.yml ") + + " not found in the metadata file, please specify " \ + + "'volcello' as supplementary variable in the recipe.") end if dataset_so_time = read_data(so_items[iii]) diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl index 6b019625f0..71323f411d 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl @@ -45,10 +45,10 @@ ; ; Caveats ; -; - MIROC-ESM and BNU-ESM doesnot work as depth variable is not called lev. -; - MRI_ESM1 doesnot work as the data is ofset by 80 degrees in longitude +; - MIROC-ESM and BNU-ESM do not work as depth variable is not called lev. +; - MRI_ESM1 does not work as the data is offset by 80 degrees in longitude ; and causes problem in interpolation. -; - CCSM4 ans CESM1-CAM5 dont work as the units for so is 1, not accepted +; - CCSM4 and CESM1-CAM5 don't work as the units for so is 1, not accepted ; by ESMValTool. ; - Transport is very small in case of NorESM1-M and ME as volcello ; values look incorrect(very small). 
@@ -153,11 +153,10 @@ begin if (all(ismissing(fx_var))) then fx_variable = "volcello" - error_msg("f", "russell_fig-7i.ncl", " ", "areacello file for " + \ + error_msg("f", "russell_fig-7i.ncl", " ", "volcello file for " + \ vo_datasets(iii) \ - + " not found in the metadata file, please " + \ - "add 'fx_files: [volcello]' to the variable dictionary in" + \ - " the recipe or add the location of file to config-user.yml") + + " not found in the metadata file, please specify " \ + + "'volcello' as supplementary variable in the recipe.") end if dataset_so_time = read_data(so_items[iii]) diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl index 86ce4bee70..cf14857a7b 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl @@ -156,9 +156,8 @@ begin fx_variable = "areacello" error_msg("f", "russell_fig-7i.ncl", " ", "areacello file for " + \ datasetnames(iii) + " not found in the metadata file," + \ - " please add 'fx_files: [areacello]' to the variable " + \ - "dictionary in the recipe or add the location of " + \ - " file to config-user.yml") + " please specify 'areacello' as supplementary" + \ + " variable in the recipe.") end if areacello_2d = fx_var delete(fx_var) @@ -212,9 +211,9 @@ begin "lgPerimOn" : False ; no perimeter "lgItemCount" : dimsizes(annots) ; how many "lgLineLabelStrings" : annots ; labels - "lgLabelsOn" : False ; no default lables + "lgLabelsOn" : False ; no default labels "lgLineLabelFontHeightF" : 0.0085 ; font height - "lgDashIndexes" : dashes ; line paterns + "lgDashIndexes" : dashes ; line patterns "lgLineColors" : colors "lgMonoLineLabelFontColor" : True ; one label color end create diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl index 2fe0cc3e4a..017b70103a 
100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl @@ -227,9 +227,8 @@ begin if (all(ismissing(fx_var))) then error_msg("f", "russell18jgr-fig9c.ncl", " ", "areacello file for " + \ datasetnames(iii) + " not found in the metadata file, " + \ - "please add 'fx_files: [areacello]' to the variable " + \ - "dictionary in the recipe or add the location of " + \ - " file to config-user.yml ") + "please specify 'areacello' as supplementary" + \ + " variable in the recipe.") end if areacello_2d = fx_var @@ -304,9 +303,9 @@ begin "lgPerimOn" : False ; no perimeter "lgItemCount" : dimsizes(annots) ; how many "lgLabelStrings" : annots ; labels - "lgLabelsOn" : True ; no default lables + "lgLabelsOn" : True ; no default labels "lgLabelFontHeightF" : 0.001 ; font height - "lgItemType" : "markers" ; line paterns + "lgItemType" : "markers" ; line patterns "lgMarkerColors" : colors "lgMarkerIndexes" : markers ; one label color end create diff --git a/esmvaltool/interface_scripts/logging.ncl b/esmvaltool/interface_scripts/logging.ncl index 6333479f96..35c3167341 100644 --- a/esmvaltool/interface_scripts/logging.ncl +++ b/esmvaltool/interface_scripts/logging.ncl @@ -61,9 +61,9 @@ procedure log_debug(output_string[*]:string) ; output_string: the text to be output as message on screen ; ; Description -; Write a debug message to the log file (only if log_level = debug in -; config-user.yml). If the input is an array, each element will be -; written on different lines. +; Write a debug message to the log file (only if log_level = debug in the +; configuration). If the input is an array, each element will be written on +; different lines. 
; ; Caveats ; diff --git a/esmvaltool/recipes/examples/recipe_extract_shape.yml b/esmvaltool/recipes/examples/recipe_extract_shape.yml index 79f04371b5..08d1bab490 100644 --- a/esmvaltool/recipes/examples/recipe_extract_shape.yml +++ b/esmvaltool/recipes/examples/recipe_extract_shape.yml @@ -7,7 +7,7 @@ documentation: The example shapefile(s) can be copied from esmvaltool/diag_scripts/shapeselect/testdata/Elbe.* and - placed in the auxiliary_data_dir defined in config-user.yml. + placed in the auxiliary_data_dir defined in the configuration. title: Example recipe extracting precipitation in the Elbe catchment. diff --git a/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml b/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml index f68a597733..925d9bd420 100644 --- a/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml +++ b/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml @@ -9,7 +9,7 @@ documentation: used to: 1. Plot a timeseries of the raw daily data - 2. Plot monthly aggregrated data over a certain period + 2. Plot monthly aggregated data over a certain period 3. 
Plot the monthly climate statistics over a certain period authors: @@ -33,7 +33,7 @@ datasets: preprocessors: daily: extract_shape: &extract_shape - # In aux (config-user.yml) + # Relative to auxiliary_data_dir defined in configuration shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp method: contains crop: true diff --git a/esmvaltool/recipes/hydrology/recipe_lisflood.yml b/esmvaltool/recipes/hydrology/recipe_lisflood.yml index ffecbc37be..3acb4be481 100644 --- a/esmvaltool/recipes/hydrology/recipe_lisflood.yml +++ b/esmvaltool/recipes/hydrology/recipe_lisflood.yml @@ -37,7 +37,8 @@ preprocessors: scheme: linear extract_shape: # Perhaps a single shapefile needs to be created covering multiple basins - shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp # (config-user, aux) + # Relative to auxiliary_data_dir defined in configuration + shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp method: contains crop: true # set to false to keep the entire globe (memory intensive!) daily_water: diff --git a/esmvaltool/recipes/hydrology/recipe_marrmot.yml b/esmvaltool/recipes/hydrology/recipe_marrmot.yml index dd6eef0a49..e85a66d9b9 100644 --- a/esmvaltool/recipes/hydrology/recipe_marrmot.yml +++ b/esmvaltool/recipes/hydrology/recipe_marrmot.yml @@ -28,7 +28,8 @@ preprocessors: daily: &daily extract_shape: # Lumped model: needs catchment-aggregated input data - shapefile: Meuse/Meuse.shp # In aux (config-user.yml) + # Relative to auxiliary_data_dir defined in configuration + shapefile: Meuse/Meuse.shp method: contains crop: true diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml index 20b0402a23..55c53147ec 100644 --- a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml +++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml @@ -10,7 +10,7 @@ documentation: Contribution to the Sixth Assessment Report: Chapter 3 Processing of CMIP3 models 
currently works only in serial mode, due to an issue in the input data still under investigation. To run the recipe - set: max_parallel_tasks: 1 in the config-user.yml file. + set the configuration option ``max_parallel_tasks: 1``. authors: - bock_lisa diff --git a/esmvaltool/recipes/recipe_carvalhais14nat.yml b/esmvaltool/recipes/recipe_carvalhais14nat.yml index 9ec0811c00..63bfbb1edd 100644 --- a/esmvaltool/recipes/recipe_carvalhais14nat.yml +++ b/esmvaltool/recipes/recipe_carvalhais14nat.yml @@ -8,7 +8,7 @@ documentation: Carvalhais et al., 2014, Nature. The data required in the obs_details section can be obtained at http://www.bgc-jena.mpg.de/geodb/BGI/tau4ESMValTool.php - and have to be stored in the auxiliary_data_dir defined i config-user.yml, + and have to be stored in the auxiliary_data_dir defined in the configuration in a subdirectory obs_data_subdir specified in the obs_details section below. diff --git a/esmvaltool/recipes/recipe_runoff_et.yml b/esmvaltool/recipes/recipe_runoff_et.yml index 6924321c7c..0a83213caa 100644 --- a/esmvaltool/recipes/recipe_runoff_et.yml +++ b/esmvaltool/recipes/recipe_runoff_et.yml @@ -8,7 +8,7 @@ documentation: water balance components for different catchments and compares the results against observations. Currently, the required catchment mask needs to be downloaded manually at https://doi.org/10.5281/zenodo.2025776 and saved in - the auxiliary_data_dir defined in config-user.yml. + the auxiliary_data_dir defined in configuration. authors: - hagemann_stefan diff --git a/esmvaltool/recipes/recipe_sea_surface_salinity.yml b/esmvaltool/recipes/recipe_sea_surface_salinity.yml index 4e670eec7f..43ec0e6b5e 100644 --- a/esmvaltool/recipes/recipe_sea_surface_salinity.yml +++ b/esmvaltool/recipes/recipe_sea_surface_salinity.yml @@ -20,8 +20,7 @@ documentation: preprocessors: timeseries: extract_shape: - # Relative paths are relative to 'auxiliary_data_dir' as configured in - # the config-user.yml file. 
+ # Relative paths are relative to the configuration option 'auxiliary_data_dir'. # The example shapefile can be downloaded from # https://marineregions.org/download_file.php?name=World_Seas_IHO_v3.zip # but any shapefile can be used @@ -50,7 +49,7 @@ datasets: - {<<: *cmip6, dataset: MPI-ESM1-2-HR, alias: MPI-ESM1-2-HR} - {<<: *cmip6, dataset: NorESM2-MM, alias: NorESM2-MM} - {<<: *cmip6, dataset: GISS-E2-2-H, alias: GISS-E2-2-H, institute: NASA-GISS} - + diagnostics: compare_salinity: diff --git a/esmvaltool/recipes/recipe_shapeselect.yml b/esmvaltool/recipes/recipe_shapeselect.yml index 0fb22c0d5d..ee56810f03 100644 --- a/esmvaltool/recipes/recipe_shapeselect.yml +++ b/esmvaltool/recipes/recipe_shapeselect.yml @@ -36,8 +36,7 @@ diagnostics: script: shapeselect/diag_shapeselect.py # Example shapefiles can be found in: # esmvaltool/diag_scripts/shapeselect/testdata/ - # Relative paths are relative to 'auxiliary_data_dir' as configured in - # the config-user.yml file. + # Relative paths are relative to configuration option 'auxiliary_data_dir'. shapefile: 'Thames.shp' weighting_method: 'mean_inside' write_xlsx: true diff --git a/esmvaltool/utils/batch-jobs/generate.py b/esmvaltool/utils/batch-jobs/generate.py index d1ceeffaa0..428229b6eb 100644 --- a/esmvaltool/utils/batch-jobs/generate.py +++ b/esmvaltool/utils/batch-jobs/generate.py @@ -9,7 +9,7 @@ - conda_path 2) If needed, edit optional parameters: - outputs -- config_file +- config_dir 3) SLURM settings This script is configured to optimize the computing footprint of the recipe testing. 
It is not necessary to edit @@ -49,11 +49,11 @@ # Full path to the miniforge3/etc/profile.d/conda.sh executable # Set the path to conda conda_path = 'PATH_TO/miniforge3/etc/profile.d/conda.sh' -# Full path to config_file -# If none, ~/.esmvaltool/config-user.yml is used -config_file = '' +# Full path to configuration directory +# If none, ~/.config/esmvaltool/ +config_dir = '' # Set max_parallel_tasks -# If none, read from config_file +# If none, read from configuration default_max_parallel_tasks = 8 # List of recipes that require non-default SLURM options set above @@ -315,11 +315,11 @@ def generate_submit(): file.write(f'. {conda_path}\n') file.write(f'conda activate {env}\n') file.write('\n') - if not config_file: + if not config_dir: file.write(f'esmvaltool run {str(recipe)}') else: - file.write(f'esmvaltool run --config_file ' - f'{str(config_file)} {str(recipe)}') + file.write(f'esmvaltool run --config_dir ' + f'{str(config_dir)} {str(recipe)}') # set max_parallel_tasks max_parallel_tasks = MAX_PARALLEL_TASKS.get( recipe.stem, diff --git a/tests/integration/test_cmorizer.py b/tests/integration/test_cmorizer.py index 11bade4190..48f75b951a 100644 --- a/tests/integration/test_cmorizer.py +++ b/tests/integration/test_cmorizer.py @@ -4,6 +4,7 @@ import os import sys +import esmvalcore import iris import iris.coord_systems import iris.coords @@ -13,7 +14,9 @@ import pytest import yaml from cf_units import Unit +from packaging import version +from esmvaltool import ESMValToolDeprecationWarning from esmvaltool.cmorizers.data.cmorizer import DataCommand @@ -28,8 +31,8 @@ def keep_cwd(): os.chdir(curr_path) -def write_config_user_file(dirname): - """Replace config_user file values for testing.""" +def write_config_file(dirname): + """Replace configuration values for testing.""" config_file = dirname / 'config-user.yml' cfg = { 'output_dir': str(dirname / 'output_dir'), @@ -143,14 +146,59 @@ def arguments(*args): sys.argv = backup -def 
test_cmorize_obs_woa_no_data(tmp_path): +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +def test_cmorize_obs_woa_no_data_config_file(tmp_path): """Test for example run of cmorize_obs command.""" + config_file = write_config_file(tmp_path) + os.makedirs(os.path.join(tmp_path, 'raw_stuff', 'Tier2')) + os.makedirs(os.path.join(tmp_path, 'output_dir')) + with keep_cwd(): + with pytest.raises(RuntimeError): + with pytest.warns(ESMValToolDeprecationWarning): + DataCommand().format('WOA', config_file=config_file) + + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file, no_data=True) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +def test_cmorize_obs_woa_data_config_file(tmp_path): + """Test for example run of cmorize_obs command.""" + config_file = write_config_file(tmp_path) + data_path = os.path.join(tmp_path, 'raw_stuff', 'Tier2', 'WOA') + put_dummy_data(data_path) + with keep_cwd(): + with pytest.warns(ESMValToolDeprecationWarning): + DataCommand().format('WOA', config_file=config_file) - config_user_file = write_config_user_file(tmp_path) + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file, no_data=False) + output_path = os.path.join(log_dir, os.listdir(log_dir)[0], 'Tier2', 'WOA') + check_output_exists(output_path) + check_conversion(output_path) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +def test_cmorize_obs_woa_no_data(tmp_path): + """Test for example run of cmorize_obs command.""" + write_config_file(tmp_path) os.makedirs(os.path.join(tmp_path, 'raw_stuff', 'Tier2')) with keep_cwd(): - with 
pytest.raises(Exception): - DataCommand().format('WOA', config_user_file) + with pytest.raises(RuntimeError): + DataCommand().format('WOA', config_dir=str(tmp_path)) log_dir = os.path.join(tmp_path, 'output_dir') log_file = os.path.join(log_dir, @@ -158,14 +206,17 @@ def test_cmorize_obs_woa_no_data(tmp_path): check_log_file(log_file, no_data=True) +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) def test_cmorize_obs_woa_data(tmp_path): """Test for example run of cmorize_obs command.""" - - config_user_file = write_config_user_file(tmp_path) + write_config_file(tmp_path) data_path = os.path.join(tmp_path, 'raw_stuff', 'Tier2', 'WOA') put_dummy_data(data_path) with keep_cwd(): - DataCommand().format('WOA', config_user_file) + DataCommand().format('WOA', config_dir=str(tmp_path)) log_dir = os.path.join(tmp_path, 'output_dir') log_file = os.path.join(log_dir, diff --git a/tests/integration/test_diagnostic_run.py b/tests/integration/test_diagnostic_run.py index b0c606f4ee..670f7088dd 100644 --- a/tests/integration/test_diagnostic_run.py +++ b/tests/integration/test_diagnostic_run.py @@ -5,12 +5,14 @@ from pathlib import Path from textwrap import dedent +import esmvalcore import pytest import yaml from esmvalcore._main import run +from packaging import version -def write_config_user_file(dirname): +def write_config_file(dirname): config_file = dirname / 'config-user.yml' cfg = { 'output_dir': str(dirname / 'output_dir'), @@ -68,10 +70,13 @@ def check(result_file): ] +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) @pytest.mark.installation @pytest.mark.parametrize('script_file', SCRIPTS) -def test_diagnostic_run(tmp_path, script_file): - +def test_diagnostic_run_config_file(tmp_path, script_file): local_script_file = Path(__file__).parent / script_file recipe_file = tmp_path / 'recipe_test.yml' @@ -96,12 +101,58 @@ def 
test_diagnostic_run(tmp_path, script_file): """.format(script_file, result_file)) recipe_file.write_text(str(recipe)) - config_user_file = write_config_user_file(tmp_path) + config_file = write_config_file(tmp_path) with arguments( 'esmvaltool', 'run', '--config_file', - config_user_file, + config_file, + str(recipe_file), + ): + run() + + check(result_file) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +@pytest.mark.installation +@pytest.mark.parametrize('script_file', SCRIPTS) +def test_diagnostic_run(tmp_path, script_file): + local_script_file = Path(__file__).parent / script_file + + recipe_file = tmp_path / 'recipe_test.yml' + script_file = tmp_path / script_file + result_file = tmp_path / 'result.yml' + config_dir = tmp_path / 'config' + config_dir.mkdir(exist_ok=True, parents=True) + + shutil.copy(local_script_file, script_file) + + # Create recipe + recipe = dedent(""" + documentation: + title: Test recipe + description: Recipe with no data. 
+ authors: [andela_bouwe] + + diagnostics: + diagnostic_name: + scripts: + script_name: + script: {} + setting_name: {} + """.format(script_file, result_file)) + recipe_file.write_text(str(recipe)) + + write_config_file(config_dir) + with arguments( + 'esmvaltool', + 'run', + '--config_dir', + str(config_dir), str(recipe_file), ): run() From c4b8d025a0e1df4a286a017e49d03f69a2b37d7f Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Tue, 22 Oct 2024 16:08:19 +0100 Subject: [PATCH 04/36] Readthedocs configuration/builds: revert to miniconda before miniforge is available (#3785) Co-authored-by: Bouwe Andela --- .readthedocs.yaml | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 071686d373..974ac2ee78 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,20 +7,13 @@ version: 2 # Set the version of Python and other tools you might need build: - os: ubuntu-22.04 + os: ubuntu-lts-latest tools: - # updated and deployed from Aug 1, 2023 - python: "mambaforge-22.9" + # try miniforge3 when available? see github.com/ESMValGroup/ESMValTool/issues/3779 + # DO NOT use mambaforge-*; that is currently sunsetted + python: "miniconda-latest" jobs: - pre_create_environment: - # update mamba just in case - - mamba update --yes --quiet --name=base mamba 'zstd=1.5.2' - - mamba --version - - mamba list --name=base post_create_environment: - - conda run -n ${CONDA_DEFAULT_ENV} mamba list - # use conda run executable wrapper to have all env variables - - conda run -n ${CONDA_DEFAULT_ENV} mamba --version - conda run -n ${CONDA_DEFAULT_ENV} pip install . 
--no-deps # Declare the requirements required to build your docs From b86acb3af4f328ca8bef776ef6abd8ac1408b98e Mon Sep 17 00:00:00 2001 From: max-anu <137736464+max-anu@users.noreply.github.com> Date: Tue, 29 Oct 2024 07:43:44 +1100 Subject: [PATCH 05/36] Adding pr, tauu, tauv, tos to NCEP2 CMORISer (#3765) Co-authored-by: Max Proft Co-authored-by: Max Proft Co-authored-by: Romain Beucher Co-authored-by: Max Proft --- CITATION.cff | 5 +++++ doc/sphinx/source/input.rst | 2 +- .../data/cmor_config/NCEP-DOE-R2.yml | 22 +++++++++++++++++++ esmvaltool/cmorizers/data/datasets.yml | 5 +++++ .../data/downloaders/datasets/ncep_doe_r2.py | 8 +++++++ .../recipes/examples/recipe_check_obs.yml | 4 ++++ 6 files changed, 45 insertions(+), 1 deletion(-) diff --git a/CITATION.cff b/CITATION.cff index 22eb3c500e..1934c36ef1 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -275,6 +275,11 @@ authors: family-names: Phillips given-names: Adam orcid: "https://orcid.org/0000-0003-4859-8585" + - + affiliation: "ACCESS-NRI, Australia" + family-names: Proft + given-names: Max + orcid: "https://orcid.org/0009-0003-1611-9516" - affiliation: "University of Arizona, USA" family-names: Russell diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index d743ede59f..556c999774 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -404,7 +404,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol | | tasmax, tasmin, ts, ua, va, wap, zg (Amon) | | | | | pr, rlut, ua, va (day) | | | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NCEP-DOE-R2 | clt, hur, prw, ta, wap (Amon) | 2 | Python | +| NCEP-DOE-R2 | clt, hur, prw, ta, wap, pr, tauu, tauv, tos (Amon) | 2 | Python | 
+------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NDP | cVeg (Lmon) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ diff --git a/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml b/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml index e0768cf354..f18f76f5a9 100644 --- a/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml +++ b/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml @@ -39,3 +39,25 @@ variables: mip: Amon raw: omega file: 'omega\.mon\.mean\.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 'prate.sfc.mon.mean.nc' + tauu_month: + short_name: tauu + mip: Amon + raw: uflx + file: 'uflx.sfc.mon.mean.nc' + make_negative: true + tauv_month: + short_name: tauv + mip: Amon + raw: vflx + file: 'vflx.sfc.mon.mean.nc' + make_negative: true + tos_month: + short_name: tos + mip: Amon + raw: skt + file: 'skt.sfc.mon.mean.nc' diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml index cda27910bd..019986343b 100644 --- a/esmvaltool/cmorizers/data/datasets.yml +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -961,9 +961,14 @@ datasets: pressure/ rhum.mon.mean.nc air.mon.mean.nc + omega.mon.mean.nc https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ gaussian_grid tcdc.eatm.mon.mean.nc + prate.sfc.mon.mean.nc + uflx.sfc.mon.mean.nc + vflx.sfc.mon.mean.nc + skt.sfc.mon.mean.nc https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ surface pr_wtr.eatm.mon.mean.nc diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py index 704493554f..2d691e710d 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py +++ 
b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py @@ -48,3 +48,11 @@ def download_dataset(config, dataset, dataset_info, start_date, end_date, wget_options=[]) downloader.download_file(url + "surface/pr_wtr.eatm.mon.mean.nc", wget_options=[]) + downloader.download_file(url + "gaussian_grid/prate.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/uflx.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/vflx.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/skt.sfc.mon.mean.nc", + wget_options=[]) diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml index 8c7ba0a382..36b65eb472 100644 --- a/esmvaltool/recipes/examples/recipe_check_obs.yml +++ b/esmvaltool/recipes/examples/recipe_check_obs.yml @@ -699,6 +699,10 @@ diagnostics: prw: ta: wap: + pr: + tauu: + tauv: + tos: additional_datasets: - {dataset: NCEP-DOE-R2, project: OBS6, mip: Amon, tier: 2, type: reanaly, version: 2, start_year: 1979, end_year: 2022} From f38bbf6359eda6b06c28e4b7b424030ac46647a3 Mon Sep 17 00:00:00 2001 From: max-anu <137736464+max-anu@users.noreply.github.com> Date: Tue, 29 Oct 2024 08:47:48 +1100 Subject: [PATCH 06/36] Adding a CMORiser for CMAP data for pr (#3766) Co-authored-by: Max Proft --- doc/sphinx/source/input.rst | 2 + .../cmorizers/data/cmor_config/CMAP.yml | 21 ++++++ esmvaltool/cmorizers/data/datasets.yml | 9 +++ .../data/downloaders/datasets/cmap.py | 38 ++++++++++ .../data/formatters/datasets/cmap.py | 69 +++++++++++++++++++ .../recipes/examples/recipe_check_obs.yml | 10 +++ 6 files changed, 149 insertions(+) create mode 100644 esmvaltool/cmorizers/data/cmor_config/CMAP.yml create mode 100644 esmvaltool/cmorizers/data/downloaders/datasets/cmap.py create mode 100644 esmvaltool/cmorizers/data/formatters/datasets/cmap.py diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index 
556c999774..fbc16b45ec 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -269,6 +269,8 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CLOUDSAT-L2 | clw, clivi, clwvi, lwp (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CMAP | pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CowtanWay | tasa (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CRU | tas, tasmin, tasmax, pr, clt (Amon), evspsblpot (Emon) | 2 | Python | diff --git a/esmvaltool/cmorizers/data/cmor_config/CMAP.yml b/esmvaltool/cmorizers/data/cmor_config/CMAP.yml new file mode 100644 index 0000000000..eef1861f08 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CMAP.yml @@ -0,0 +1,21 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: CMAP + project_id: OBS6 + tier: 2 + version: "v1" + modeling_realm: reanaly + source: "https://psl.noaa.gov/data/gridded/data.cmap.html" + reference: "cmap" + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + pr_month: + short_name: pr + mip: Amon + raw: precip + file: "precip.mon.mean.nc" diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml index 019986343b..4c7c168009 100644 --- a/esmvaltool/cmorizers/data/datasets.yml +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -264,6 +264,15 @@ datasets: named like the year (e.g. 
2007), no subdirectories with days etc. + CMAP: + tier: 2 + source: https://psl.noaa.gov/data/gridded/data.cmap.html + last_access: 2024-09-09 + info: | + To facilitate the download, the links to the https server are provided. + https://downloads.psl.noaa.gov/Datasets/cmap/enh/ + precip.mon.mean.nc + CowtanWay: tier: 2 source: https://www-users.york.ac.uk/~kdc3/papers/coverage2013/series.html diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py b/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py new file mode 100644 index 0000000000..5fd58b5ac1 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py @@ -0,0 +1,38 @@ +"""Script to download CMAP (CPC Merged Analysis of Precipitation).""" + +import logging + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server="ftp2.psl.noaa.gov", + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + downloader.download_file("/Datasets/cmap/enh/precip.mon.mean.nc") diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cmap.py b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py new file mode 100644 index 0000000000..656942b49a --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py @@ -0,0 +1,69 @@ +"""ESMValTool CMORizer for CMAP (CPC Merged Analysis of Precipitation) data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + https://psl.noaa.gov/data/gridded/data.cmap.html + +Last access + 20240909 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. + + https://downloads.psl.noaa.gov/Datasets/cmap/enh/ + precip.mon.mean.nc + +Caveats + +""" + +import logging +import re +from copy import deepcopy +from pathlib import Path + +import iris +from esmvaltool.cmorizers.data import utilities as utils + + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, raw_filepath, out_dir): + cmor_info = cfg["cmor_table"].get_variable(var["mip"], short_name) + attributes = deepcopy(cfg["attributes"]) + attributes["mip"] = var["mip"] + + cubes = iris.load(raw_filepath) + for cube in cubes: + assert cube.units == "mm/day", f"unknown units:{cube.units}" + # convert data from mm/day to kg m-2 s-1 + # mm/day ~ density_water * mm/day + # = 1000 kg m-3 * 1/(1000*86400) m s-1 = 1/86400 kg m-2 s-1 + cube = cube / 86400 + cube.units = "kg m-2 s-1" + + utils.fix_var_metadata(cube, cmor_info) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attributes) + + logger.info("Saving file") + utils.save_variable(cube, short_name, out_dir, attributes, + unlimited_dimensions=["time"]) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + for short_name, var in cfg["variables"].items(): + logger.info("CMORizing variable '%s'", short_name) + short_name = var["short_name"] + raw_filenames = Path(in_dir).rglob("*.nc") + filenames = [] + for raw_filename in raw_filenames: + if re.search(var["file"], str(raw_filename)) is not None: + filenames.append(raw_filename) + + for filename in sorted(filenames): + _extract_variable(short_name, var, cfg, filename, out_dir) diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml index 36b65eb472..880aef831a 100644 --- 
a/esmvaltool/recipes/examples/recipe_check_obs.yml +++ b/esmvaltool/recipes/examples/recipe_check_obs.yml @@ -61,6 +61,16 @@ diagnostics: scripts: null + CMAP: + description: CMAP check + variables: + pr: + additional_datasets: + - {project: OBS6, dataset: CMAP, mip: Amon, tier: 2, + type: reanaly, version: v1} + scripts: null + + CRU: description: CRU check variables: From f18fe9c0a630ee9a389425a4aed3925119faa018 Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Tue, 29 Oct 2024 12:37:53 +0000 Subject: [PATCH 07/36] Pin pys2index >=0.1.5 in osx environment (#3792) --- environment_osx.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment_osx.yml b/environment_osx.yml index 07fdf96de7..8285b43ecd 100644 --- a/environment_osx.yml +++ b/environment_osx.yml @@ -52,7 +52,7 @@ dependencies: - psy-reg >=1.5.0 - psy-simple >=1.5.0 - pyproj >=2.1 - - pys2index # only from conda-forge + - pys2index >=0.1.5 # only from conda-forge; https://github.com/ESMValGroup/ESMValTool/pull/3792 - python >=3.10,<3.13 - python-cdo - python-dateutil From 0961c45d29a86a949f946baca238757d4152856f Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Wed, 30 Oct 2024 12:16:47 +0100 Subject: [PATCH 08/36] Use `transform_first=True` for contourf plots with Robinson projection to avoid cartopy bug (#3789) --- esmvaltool/cmorizers/data/formatters/datasets/cmap.py | 2 +- esmvaltool/diag_scripts/monitor/multi_datasets.py | 9 +++++++++ esmvaltool/diag_scripts/shared/plot/_plot.py | 1 + 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cmap.py b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py index 656942b49a..fecd2b128e 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/cmap.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py @@ -25,8 +25,8 @@ from pathlib import Path import iris -from esmvaltool.cmorizers.data import utilities as utils +from 
esmvaltool.cmorizers.data import utilities as utils logger = logging.getLogger(__name__) diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py index 32f654b3b6..068c4033da 100644 --- a/esmvaltool/diag_scripts/monitor/multi_datasets.py +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -1176,6 +1176,9 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): axes_data = fig.add_subplot(gridspec[0:2, 0:2], projection=projection) plot_kwargs['axes'] = axes_data + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + plot_kwargs['transform_first'] = True plot_data = plot_func(cube, **plot_kwargs) axes_data.coastlines() if gridline_kwargs is not False: @@ -1212,6 +1215,9 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): plot_kwargs_bias = self._get_plot_kwargs(plot_type, dataset, bias=True) plot_kwargs_bias['axes'] = axes_bias + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + plot_kwargs_bias['transform_first'] = True plot_bias = plot_func(bias_cube, **plot_kwargs_bias) axes_bias.coastlines() if gridline_kwargs is not False: @@ -1268,6 +1274,9 @@ def _plot_map_without_ref(self, plot_func, dataset): axes = fig.add_subplot(projection=self._get_map_projection()) plot_kwargs = self._get_plot_kwargs(plot_type, dataset) plot_kwargs['axes'] = axes + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + plot_kwargs['transform_first'] = True plot_map = plot_func(cube, **plot_kwargs) axes.coastlines() gridline_kwargs = self._get_gridline_kwargs(plot_type) diff --git a/esmvaltool/diag_scripts/shared/plot/_plot.py b/esmvaltool/diag_scripts/shared/plot/_plot.py index d7db4e1b14..66f1e82c08 100644 --- a/esmvaltool/diag_scripts/shared/plot/_plot.py +++ b/esmvaltool/diag_scripts/shared/plot/_plot.py @@ -228,6 +228,7 @@ def global_contourf(cube, if cbar_range is not None: 
levels = np.linspace(*cbar_range) kwargs['levels'] = levels + kwargs['transform_first'] = True # see SciTools/cartopy/issues/2457 axes = plt.axes(projection=ccrs.Robinson(central_longitude=10)) plt.sca(axes) map_plot = iris.plot.contourf(cube, **kwargs) From 4f5d049ff2eec9d054d77c4eb34b6a69eba0ee7f Mon Sep 17 00:00:00 2001 From: sloosvel <45196700+sloosvel@users.noreply.github.com> Date: Wed, 30 Oct 2024 20:22:39 +0100 Subject: [PATCH 09/36] Add next release schedule (#3794) Co-authored-by: Valeriu Predoi --- .../release_strategy/release_strategy.rst | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/doc/sphinx/source/community/release_strategy/release_strategy.rst b/doc/sphinx/source/community/release_strategy/release_strategy.rst index b95bab67b1..72c55266dd 100644 --- a/doc/sphinx/source/community/release_strategy/release_strategy.rst +++ b/doc/sphinx/source/community/release_strategy/release_strategy.rst @@ -53,7 +53,20 @@ With the following release schedule, we strive to have three releases per year a Upcoming releases ^^^^^^^^^^^^^^^^^ -- 2.12.0 (TBD) +- 2.12.0 (Release Manager: `Saskia Loosveldt Tomas`_) + ++------------+------------+----------------------------------------+-------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+========================================+=====================================+ +| 2025-01-13 | | ESMValCore `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-01-20 | | ESMValCore Release 2.12.0 | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-01-27 | | ESMValTool `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-02-03 | | ESMValTool Release 2.12.0 | | 
++------------+------------+----------------------------------------+-------------------------------------+ + Past releases ^^^^^^^^^^^^^ From f64a3db5290934fba56423d5788b41a95dded5d2 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 31 Oct 2024 13:44:33 +0000 Subject: [PATCH 10/36] [Condalock] Update Linux condalock file (#3796) Co-authored-by: valeriupredoi --- conda-linux-64.lock | 111 ++++++++++++++++++++++---------------------- 1 file changed, 56 insertions(+), 55 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 5535cdcaa0..1b089cf458 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -11,19 +11,19 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-he073ed8_17.conda#285931bd28b3b8f176d46dd9fd627a09 +https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-he073ed8_18.conda#ad8527bf134a90e1c9ed35fa0b64318c https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.5-ha770c72_0.conda#2889e6b9c666c3a564ab90cedc5832fd https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda#d8d7293c5b37f39b2ac32940621c6592 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 
-https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_1.conda#83e1364586ceb8d0739fbc85b5c95837 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-14.2.0-h41c2201_101.conda#fb126e22f5350c15fec6ddbd062f4871 https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda#cc3573974587f12dda90d96e3e55a702 https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-14.2.0-h41c2201_101.conda#60b9a16fd147f7184b5a964aa08f3b0f -https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_17.conda#f58cb23983633068700a756f0b5f165a +https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_18.conda#0ea96f90a10838f58412aa84fdd9df09 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d -https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.43-h4bf12b8_1.conda#5f354010f194e85dc681dec92405ef9e +https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.43-h4bf12b8_2.conda#cf0c5521ac2a20dfa6c662a4009eeef6 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda#1b53af320b24547ce0fb8196d2604542 @@ -75,7 +75,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libopenlibm4-0.8.1-hd590300_1.co https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-14.2.0-h2a3dede_1.conda#160623b9425f5c04941586da43bd1a9c 
https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda#36f79405ab16bf271edb55b213836dac +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -87,10 +87,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.cond https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.0-hd0bcaf9_1007.conda#28eb714416de4eb83e2cbc47e99a1b45 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.36-h5888daf_0.conda#de9cd5bca9e4918527b9b72b6e2e1409 https://conda.anaconda.org/conda-forge/linux-64/pkg-config-0.29.2-h4bc722e_1009.conda#1bee70681f504ea424fb07cdb090c001 https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda#77d9955b4abddb811cb8ab1aa7d743e4 https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.5-h3931f03_0.conda#334dba9982ab9f5d62033c61698a8683 https://conda.anaconda.org/conda-forge/linux-64/sed-4.8-he412f7d_0.tar.bz2#7362f0042e95681f5d371c46c83ebd08 +https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc 
https://conda.anaconda.org/conda-forge/linux-64/xorg-imake-1.0.10-h5888daf_0.conda#040f0ca9f518151897759ad09ea98b2d https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 @@ -125,15 +127,15 @@ https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he0204 https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.1.0-h00ab1b0_0.conda#88928158ccfe797eac29ef5e03f7d23d https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda#73301c133ded2bf71906aa2104edae8b -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda#d0ed81c4591775b70384f4cc78e05cd1 https://conda.anaconda.org/conda-forge/linux-64/libunwind-1.6.2-h9c3ff4c_0.tar.bz2#a730b2badd586580c5752cc73842e068 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda#e8536ec89df2aec5f65fefcf4ccd58ba https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2#c66fe2d123249af7651ebde8984c51c2 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mbedtls-3.5.1-h59595ed_0.conda#a7b444a6e008b804b35521895e3440e2 -https://conda.anaconda.org/conda-forge/linux-64/nccl-2.23.4.1-h03a54cd_0.conda#84df066b3b35c59a697af6066137b2a6 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 +https://conda.anaconda.org/conda-forge/linux-64/nccl-2.23.4.1-h03a54cd_2.conda#a08604ac3f9c3dbd128bb24e089dee5f 
+https://conda.anaconda.org/conda-forge/linux-64/nss-3.106-hdf54f9c_0.conda#efe735c7dc47dddbb14b3433d11c6feb https://conda.anaconda.org/conda-forge/linux-64/openlibm-0.8.1-hd590300_1.conda#6eba22eb06d69e53d0ca01eef42bc675 https://conda.anaconda.org/conda-forge/linux-64/p7zip-16.02-h9c3ff4c_1001.tar.bz2#941066943c0cac69d5aa52189451aa5f https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 @@ -142,7 +144,6 @@ https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#7 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f -https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.2.1-h5888daf_0.conda#0d9c441855be3d8dfdb2e800fe755059 https://conda.anaconda.org/conda-forge/linux-64/tktable-2.10-h8bc8fbc_6.conda#dff3627fec2c0584ded391205295abf0 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 @@ -157,32 +158,31 @@ https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.15.1-hc57e6cf_0.conda#5f84961d86d0ef78851cb34f9d5e31fe https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf1063bd_110.conda#ee3e687b78b778db7b304e5b00a4dca6 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d 
+https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-14.2.0-hc73f493_1.conda#131a59b3bb1dbbfc63ec0f21eb0e8c65 https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-14.2.0-h2c03514_1.conda#41664acd4c99ef4d192e12950ff68ca6 https://conda.anaconda.org/conda-forge/linux-64/hdfeos2-2.20-h3e53b52_1004.conda#c21dc684e0e8efa507aba61a030f65e7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda#1091193789bb830127ed067a9e01ac57 -https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h104a339_1.conda#9ef052c2eee74c792833ac2e820e481e -https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.8.1-he8d1d4c_1.conda#febd0520afc041dd938acdce0f26d71b +https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 +https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.8.4-hd24f944_0.conda#94887b4deb460378a34e1533beaacfd5 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.0-hdb8da77_2.conda#9c4554fafc94db681543804037e65de2 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda#ae05ece66d3924ac3d48b4aa3fa96cec +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.28-pthreads_h3e26593_0.conda#2bd7dc48907a3b6bf766ed87867f3459 
https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-hd5b35b9_1.conda#06def97690ef90781a91b786cb48a0a9 https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda#41c69fba59d495e8cf5ffda48a607e35 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda#3d9f3a2e5d7213c34997e4464d2f938c https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-h6565414_0.conda#80eaf80d84668fa5620ac9ec1b4bf56f -https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.1-cuda118_h09a87be_4.conda#b11b225202c3fd2ac6767ddc7e5d094f -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-he7c6b58_4.conda#08a9265c637230c37cb1be4a6cad4536 +https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.2-cuda118_h09a87be_0.conda#d59c3f95f80071f24ebce434494ead0a +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_1.conda#21f1e3d43686bc70bd98cc62a431a2cf https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h90cbb55_3.conda#2eeb50cab6652538eee8fc0bc3340c81 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.105-hd34e28f_0.conda#28d7602527b76052422aaf5d6fd7ad81 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 https://conda.anaconda.org/conda-forge/linux-64/s2geometry-0.10.0-h8413349_4.conda#d19f88cf8812836e6a4a2a7902ed0e77 https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda#909188c8979846bac8e586908cf1ca6a -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda#b2b3e737da0ae347e16ef1970a5d3f14 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 @@ -221,15 +221,15 @@ https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda https://conda.anaconda.org/conda-forge/noarch/dodgy-0.2.1-py_0.tar.bz2#62a69d073f7446c90f417b0787122f5b https://conda.anaconda.org/conda-forge/noarch/ecmwf-api-client-1.6.3-pyhd8ed1ab_0.tar.bz2#15621abf59053e184114d3e1d4f9d01e https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2#3cf04868fee0a029769bd41f4b2fbf2d -https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda#a2f2138597905eaa72e561d8efb42cf3 +https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_0.conda#cdcdbe90dfab4075fc1f3c4cf2e4b4e5 https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 https://conda.anaconda.org/conda-forge/noarch/fasteners-0.17.3-pyhd8ed1ab_0.tar.bz2#348e27e78a5e39090031448c72f66d5e https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda#12e6988845706b2cfbc3bc35c9a61a95 -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.1-py312h66e93f0_1.conda#0ad3232829b9509599d8f981c12c9d05 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda#ace4329fbff4c69ab0309db6da182987 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2#6b1f32359fc5d2ab7b491d0029bfffeb https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda#c7f243bbaea97cd6ea1edd693270100e @@ -244,8 +244,9 @@ https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_0.conda#d https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda#ff7ca04134ee8dde1d7cf491a78ef7c7 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 +https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-24_linux64_openblas.conda#80aea6603a6813b16ec119d00382b772 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda#7da1d242ca3591e174a3c7d82230d3c0 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f @@ -264,7 +265,7 @@ https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_1. 
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda#4eccaeba205f0aed9ac3a9ea58568ca3 https://conda.anaconda.org/conda-forge/noarch/natsort-8.4.0-pyhd8ed1ab_0.conda#70959cd1db3cf77b2a27a0836cfd08a7 -https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.1-pyhd8ed1ab_0.conda#4994669899eb2e84ab855edcb71efc58 +https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyhd8ed1ab_1.conda#1d4c088869f206413c59acdd309908b7 https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.28-pthreads_h3d04fff_0.conda#eb2736b14329cf5650917caa43a549c6 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda#1e6c10f7d749a490612404efeb179eb8 @@ -275,7 +276,7 @@ https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8e https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py312h66e93f0_2.conda#2c6c0c68f310bc33972e7c83264d7786 -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py312h66e93f0_2.conda#e6d115113d912f9c2cc8cddddac20d61 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py312h66e93f0_0.conda#0524eb91d3d78d76d671c6e3cd7cee82 https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.12.1-pyhd8ed1ab_0.conda#72453e39709f38d0494d096bb5f678b7 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.2.0-pyhd8ed1ab_0.conda#0cf7fef6aa123df28adb21a590065e3d @@ -311,12 +312,12 @@ 
https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda# https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda#af648b62462794649066366af4ecd5b0 https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda#3df84416a021220d8b5700c613af2dc5 -https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.16-pyhd8ed1ab_0.conda#dfd9748c73bc264c3f634d1345ee8210 +https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.21.16-pyhd8ed1ab_0.conda#501f6d3288160a31d99a2f1321e77393 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py312h2ec8cdc_1.conda#96226f62dddc63226472b7477d783967 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-pyhd8ed1ab_1.conda#6042b782b893029aa40335782584a092 https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 -https://conda.anaconda.org/conda-forge/noarch/webob-1.8.8-pyhd8ed1ab_0.conda#ae69b699c308c3bd20388219764235b0 https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 https://conda.anaconda.org/conda-forge/noarch/xlsxwriter-3.2.0-pyhd8ed1ab_0.conda#a1f7264726115a2f8eac9773b1f27eba https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 @@ -332,19 +333,19 @@ https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-hbaf354b_4.conda# https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda#debd1677c2fea41eb2233a260f48a298 
https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 -https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda#0ed9d7c0e9afa7c025807a9a8136ea3e +https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_0.conda#461bcfab8e65c166e297222ae919a2d4 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda#1b7a01fd02d11efe0eb5a676842a7b7d https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2#4fd2c6b53934bd7d96d1f3fdaf99b79f https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2#a29b7c141d6b2de4bb67788a5f107734 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.3-py312h178313f_1.conda#2621104ac246594948615017c1254c66 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.4-py312h178313f_0.conda#a32fbd2322865ac80c7db74c553f5306 https://conda.anaconda.org/conda-forge/linux-64/curl-8.9.1-h18eb788_0.conda#2e7dedf73dfbfcee662e2a0f6175e4bb https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda#a921e2fe122e7f38417b9b17c7a13343 https://conda.anaconda.org/conda-forge/noarch/docformatter-1.7.5-pyhd8ed1ab_0.conda#3a941b6083e945aa87e739a9b85c82e9 https://conda.anaconda.org/conda-forge/noarch/docrep-0.3.2-pyh44b312d_0.tar.bz2#235523955bc1bfb019d7ec8a2bb58f9a https://conda.anaconda.org/conda-forge/noarch/fire-0.7.0-pyhd8ed1ab_0.conda#c8eefdf1e822c56a6034602e67bc92a5 https://conda.anaconda.org/conda-forge/noarch/flake8-7.1.1-pyhd8ed1ab_0.conda#a25e5df6b26be3c2d64be307c1ef0b37 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h66e93f0_0.conda#e311030d9322f6f77e71e013490c83b2 
+https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h178313f_1.conda#bbbf5fa5cab622c33907bc8d7eeea9f7 https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b https://conda.anaconda.org/conda-forge/linux-64/git-2.46.0-pl5321hb5640b7_0.conda#825d146359bc8b85083d92259d0a0e1b @@ -352,7 +353,6 @@ https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda#62 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda#7e1729554e209627636a0f6fabcdd115 -https://conda.anaconda.org/conda-forge/noarch/html5lib-1.1-pyhd8ed1ab_1.conda#51862c722035f53c5d99ae99a78ea569 https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.2.1-pyha770c72_0.conda#b9f5330c0853ccabc39a9878c6f1a2ab https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda#c808991d29b9838fb4d96ce8267ec9ec https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_0.conda#1d25ed2b95b92b026aaa795eabec8d91 @@ -361,18 +361,18 @@ https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda#25 https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda#afcd1b53bcac8844540358e33f33d28f https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-24_linux64_openblas.conda#f5b8822297c9c790cec0795ca1fc9be6 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda#8dabe607748cb3d7002ad73cd06f1325 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-24_linux64_openblas.conda#fd540578678aefe025705f4b58b36b2e +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 https://conda.anaconda.org/conda-forge/noarch/logilab-common-1.7.3-py_0.tar.bz2#6eafcdf39a7eb90b6d951cfff59e8d3b -https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py312he28fd5a_1.conda#4bc1e0dda9208b8934333d878dde4996 +https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py312he28fd5a_2.conda#3acf38086326f49afed094df4ba7c9d9 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda#69a8838436435f59d72ddcb8dfd24a28 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf -https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_0.conda#81bb643d6c3ab4cbeaf724e9d68d0a6a https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda#0854b9ff0cc10a1f6f67b0f352b8e75a 
https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda#e479d1991c725e1a355f33c0e40dbc66 @@ -382,20 +382,22 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c https://conda.anaconda.org/conda-forge/noarch/python-utils-3.8.2-pyhd8ed1ab_0.conda#89703b4f38bd1c0353881f085bc8fdaa https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda#746ce19f0829ec3e19c93007b1a224d3 +https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyh0610db2_0.conda#325219de79481bcf5b6446d327e3d492 https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda#0fc8b52192a8898627c3efae1003e9f6 https://conda.anaconda.org/conda-forge/noarch/requirements-detector-1.3.1-pyhd8ed1ab_0.conda#f921ea6a1138cc7edee77de8ed12b226 https://conda.anaconda.org/conda-forge/noarch/retrying-1.3.3-pyhd8ed1ab_3.conda#1f7482562f2082f1b2abf8a3e2a41b63 https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.6-py312h66e93f0_1.conda#28ed869ade5601ee374934a31c9d628e https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 -https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda#8662629d9a05f9cff364e31ca106c1ac -https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.5-pyhd8ed1ab_0.conda#c6e94fc2b2ec71ea33fe7c7da259acb4 +https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 +https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.6-pyhd8ed1ab_0.conda#92718e1f892e1e4623dcc59b9f9c4e55 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 
-https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.0-pyhd8ed1ab_0.conda#a6ed1227ba6ec37cfc2b25e6512f729f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f +https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_0.conda#ff98f23ad74d2a3256debcd9df65d37d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxpm-3.5.17-hb9d3cd8_1.conda#f35bec7fface97f67f44ca952fc740b7 https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhca7485f_0.conda#c089f90a086b6214c5606368d0d3bad0 https://conda.anaconda.org/conda-forge/noarch/yamllint-1.35.1-pyhd8ed1ab_0.conda#a1240b99a7ccd953879dc63111823986 -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.15.5-py312h66e93f0_0.conda#a17fd28f7b4b77527218535fddb8acf5 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 https://conda.anaconda.org/conda-forge/linux-64/arpack-3.9.1-nompi_h77f6705_101.conda#ff39030debb47f6b53b45bada38e0903 https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h6c0439f_6.conda#4e472c316d08af60faeb71f86d7563e1 @@ -425,13 +427,12 @@ https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.5.0-pyhd8ed1ab_0.co https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.1-pyhd8ed1ab_0.conda#2a3426f75e2172c932131f4e3d51bcf4 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda#173afeb0d112c854fd1a9fcac4b5cce3 -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda#c54c0107057d67ddf077751339ec2c63 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c 
https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_0.conda#ecd5e850bcd3eca02143e7df030ee50f https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/python-build-1.2.2.post1-pyhff2d567_0.conda#bd5ae3c630d5eed353badb091fd3e603 -https://conda.anaconda.org/conda-forge/noarch/rdflib-6.2.0-pyhd8ed1ab_0.tar.bz2#b9acd5fbaf467f7447746b1ecac50e83 https://conda.anaconda.org/conda-forge/linux-64/suitesparse-7.8.2-hb42a789_0.conda#b7d1ce5a599ec2caf69673f5beff7696 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.16-hb9d3cd8_0.conda#7c0a9bf62d573409d12ad14b362a96e5 @@ -442,7 +443,7 @@ https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.con https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda#ff28f374b31937c048107521c814791e https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda#7823092a3cf14e98a52d2a2875c47c80 -https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.38.0-h8bb6dbc_0.conda#30ca97df26e33cd48444586e9d088e9a +https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.38.3-h8bb6dbc_1.conda#73265d4acc551063cc5c5beab37f33c5 https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.1-pyhd8ed1ab_1.conda#5fc43108dee4106f23050acc7a101233 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 
https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda#43f629202f9eec21be5f71171fb5daf8 @@ -564,11 +565,11 @@ https://conda.anaconda.org/conda-forge/noarch/sparse-0.15.4-pyh267e887_1.conda#4 https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.4-py312hc0a28a1_0.conda#97dc960f3d9911964d73c2cf240baea5 https://conda.anaconda.org/conda-forge/noarch/tifffile-2024.9.20-pyhd8ed1ab_0.conda#6de55c7859ed314159eaf2b7b4f19cc7 https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda#061175d9d4c046a1cf8bffe95a359fab -https://conda.anaconda.org/conda-forge/noarch/xarray-2024.9.0-pyhd8ed1ab_1.conda#dc790d427d89b85ae12fc094e264833f +https://conda.anaconda.org/conda-forge/noarch/xarray-2024.10.0-pyhd8ed1ab_0.conda#53e365732dfa053c4d19fc6b927392c4 https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_0.conda#41abde21508578e02e3fd492e82a05cd https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py312hf9745cd_2.conda#cc3ecff140731b46b970a7c4787b1823 https://conda.anaconda.org/conda-forge/linux-64/cdo-2.4.1-h9fe33b1_1.conda#a326dab3d2a1a8e32c2a6f792fac3161 -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.9.5-pyhd8ed1ab_1.conda#7ee17828b8e0472196ed1663cdc970cb +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 https://conda.anaconda.org/conda-forge/noarch/cfgrib-0.9.14.1-pyhd8ed1ab_0.conda#1870fe8c9bd8967429e227be28ab94d2 https://conda.anaconda.org/conda-forge/noarch/chart-studio-1.1.0-pyh9f0ad1d_0.tar.bz2#acd9a12a35e5a0221bdf39eb6e4811dc https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b @@ -579,14 +580,14 @@ https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu. 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda#c693e703649051ee9db0fabd4fcd0483 https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda#4015ef020928219acc0b5c9edbce8d30 https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda#c3fac34ecba2fcf9d5d31a03b975d5a1 -https://conda.anaconda.org/conda-forge/noarch/multiurl-0.3.1-pyhd8ed1ab_0.conda#4dff4abb5728f7662ecaaa8bee3a0260 +https://conda.anaconda.org/conda-forge/noarch/multiurl-0.3.2-pyhd8ed1ab_0.conda#9b6cf42ef472b332970282ec87d2e5d4 https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/nco-5.2.8-hf7c1f58_0.conda#6cd18a9c6b8269b0cd101ba9cc3d02ab https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed https://conda.anaconda.org/conda-forge/noarch/prospector-1.12.1-pyhd8ed1ab_0.conda#8621ba9cf057da26d371b87cd2264259 -https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py312h7900ff3_0.conda#9a6ebd6c124dbf39a13b2529e16ddce8 -https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.1-cuda118_pyh40095f8_4.conda#93ab068c137810f697b41b41a53cec70 +https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py312h7900ff3_1.conda#f110e71421e5c86e50232cc027c6d85c +https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.2-cuda118_pyh40095f8_0.conda#aa5881b02bd9555a7b06c709aa33bd20 https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.1-py312h7900ff3_1.conda#c3d006b1d90fa9f5ae436ff9d6c40249 https://conda.anaconda.org/conda-forge/noarch/pyroma-4.2-pyhd8ed1ab_0.conda#fe2aca9a5d4cb08105aefc451ef96950 https://conda.anaconda.org/conda-forge/linux-64/r-bigmemory-4.6.4-r42ha503ecb_0.conda#12b6fa8fe80a6494a948c6ea2f34340d @@ -614,11 +615,11 @@ 
https://conda.anaconda.org/conda-forge/linux-64/r-timechange-0.3.0-r42ha503ecb_0 https://conda.anaconda.org/conda-forge/linux-64/r-xml2-1.3.6-r42hbfba7a4_1.conda#5c3d7a89a2d5e1c0885f92d1aa6fde30 https://conda.anaconda.org/conda-forge/linux-64/r-zoo-1.8_12-r42h57805ef_1.conda#5367d265c0c9c151dea85f1ccb515ec1 https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b -https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py312h1df14c2_2.conda#104fecd2263afe390810307ad0bfe563 +https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py312hf9745cd_3.conda#3612f99c589d51c363c8b90c0bcf3a18 https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.2-pyhd8ed1ab_2.conda#b713b116feaf98acdba93ad4d7f90ca1 -https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.4.4-pyhd8ed1ab_0.conda#ef4a03815973391882a6f0caa797e3fb -https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhca7485f_3.conda#1d43833138d38ad8324700ce45a7099a -https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_1.conda#b7212cd8247ce909631fdcb77015914a +https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.5.0-pyhd8ed1ab_0.conda#0ca8f6f735f6171aa178364cdbbebe4d +https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e +https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_2.conda#5d8984ceb5fdf85110ca7108114ecc18 https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda#b654d072b8d5da807495e49b28a0b884 https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda#63779711c7afd4fcf9cea67538baa67a https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda#49c60a8dc089d8127b9368e9eb6c1a77 @@ -628,7 +629,7 @@ https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1 
https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.5.1-py312h7900ff3_0.conda#683ec8787a523de54b02c885e2c2aefa https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.8.0-pyhd8ed1ab_0.conda#fba377622e74ee0bbeb8ccae9fa593d3 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h9cafe31_1_cpu.conda#235827b9c93850cafdd2d5ab359893f9 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h01725c0_2_cpu.conda#add603bfa43d9bf3f06783f780e1a817 https://conda.anaconda.org/conda-forge/noarch/python-cdo-1.6.0-pyhd8ed1ab_0.conda#3fd1a0b063c1fbbe4b7bd5a5a7601e84 https://conda.anaconda.org/conda-forge/linux-64/r-akima-0.6_3.4-r42h61816a4_2.conda#8536251313f441c4d70ff11ad976d294 https://conda.anaconda.org/conda-forge/noarch/r-callr-3.7.6-r42hc72bb7e_0.conda#4fb1765d6dc531936db81af3f6be316a @@ -647,8 +648,8 @@ https://conda.anaconda.org/conda-forge/linux-64/r-splancs-2.01_45-r42hbcb9c34_0. 
https://conda.anaconda.org/conda-forge/linux-64/r-vctrs-0.6.5-r42ha503ecb_0.conda#5689030c60302fb5bb7a48b54c11dbe8 https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.2-hd8ed1ab_2.conda#a79d8797f62715255308d92d3a91ef2e https://conda.anaconda.org/conda-forge/noarch/xesmf-0.8.7-pyhd8ed1ab_0.conda#42301f78a4c6d2500f891b9723160d5c -https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.1-cuda118_pyh256f914_4.conda#2c026999ffd3407ddce239cac2da0972 -https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.7.3-pyhd8ed1ab_0.conda#bb748c8dcbcc48b4565459a860b13616 +https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.2-cuda118_pyh256f914_0.conda#2dcf3e60ef65fd4cb95048f2491f6a89 +https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.7.4-pyhd8ed1ab_0.conda#67a29b663023b8c0e3d8a73013ea3e23 https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda#4a30f4277a1894928a7057d0e14c1c95 https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda#cd2c36e8865b158b82f61c6aac28b7e1 https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b @@ -668,7 +669,7 @@ https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_1.conda# https://conda.anaconda.org/conda-forge/noarch/r-climprojdiags-0.3.3-r42hc72bb7e_0.conda#f34d40a3f0f9160fdd2bccaae8e185d1 https://conda.anaconda.org/conda-forge/noarch/r-lintr-3.1.2-r42hc72bb7e_0.conda#ef49cc606b94a9d5f30b9c48f5f68848 https://conda.anaconda.org/conda-forge/linux-64/r-tibble-3.2.1-r42h57805ef_2.conda#b1278a5148c9e52679bb72112770cdc3 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_1.conda#7e8ddbd44fb99ba376b09c4e9e61e509 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_2.conda#5f7d505626cb057e1320bbd46dd02ef2 https://conda.anaconda.org/conda-forge/noarch/r-ggplot2-3.5.1-r42hc72bb7e_0.conda#77cc0254e0dc92e5e7791ce20a170f74 
https://conda.anaconda.org/conda-forge/noarch/r-rematch2-2.1.2-r42hc72bb7e_3.conda#5ccfee6f3b94e6b247c7e1929b24f1cc https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda#81de1c44ab7f6cadab4a59b6d76dfa87 @@ -682,9 +683,9 @@ https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab https://conda.anaconda.org/conda-forge/linux-64/r-geomap-2.5_0-r42h57805ef_2.conda#020534c6abdee4f1253c221e926a5341 https://conda.anaconda.org/conda-forge/noarch/esmvalcore-2.11.0-pyhd8ed1ab_0.conda#ae2c9a927475f5519d0164c542cde378 https://conda.anaconda.org/conda-forge/noarch/r-s2dverification-2.10.3-r42hc72bb7e_2.conda#8079a86a913155fe2589ec0b76dc9f5e -https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.13-pyhd8ed1ab_0.conda#b2f4f2f3923646802215b040e63d042e +https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.14-pyhd8ed1ab_0.conda#351a11ac1215eb4f6c5b82e30070277a https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.5-pyhd8ed1ab_0.conda#b808b8a0494c5cca76200c73e260a060 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 From 12054d25b539347cc51902ac575e0553491b483e Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Thu, 31 Oct 2024 16:01:19 +0100 Subject: [PATCH 11/36] Fix contourf plots for masked data (#3797) --- .../diag_scripts/monitor/multi_datasets.py | 40 +++++++++++++++++-- 
esmvaltool/diag_scripts/shared/plot/_plot.py | 12 +++++- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py index 068c4033da..70faee96c2 100644 --- a/esmvaltool/diag_scripts/monitor/multi_datasets.py +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -608,6 +608,7 @@ from pprint import pformat import cartopy.crs as ccrs +import dask.array as da import iris import matplotlib as mpl import matplotlib.dates as mdates @@ -1178,8 +1179,15 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): plot_kwargs['axes'] = axes_data if plot_func is iris.plot.contourf: # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 plot_kwargs['transform_first'] = True - plot_data = plot_func(cube, **plot_kwargs) + npx = da if cube.has_lazy_data() else np + cube_to_plot = cube.copy( + npx.ma.filled(cube.core_data(), np.nan) + ) + else: + cube_to_plot = cube + plot_data = plot_func(cube_to_plot, **plot_kwargs) axes_data.coastlines() if gridline_kwargs is not False: axes_data.gridlines(**gridline_kwargs) @@ -1196,7 +1204,17 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): if self.plots[plot_type]['common_cbar']: plot_kwargs.setdefault('vmin', plot_data.get_clim()[0]) plot_kwargs.setdefault('vmax', plot_data.get_clim()[1]) - plot_ref = plot_func(ref_cube, **plot_kwargs) + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + plot_kwargs['transform_first'] = True + npx = da if ref_cube.has_lazy_data() else np + ref_cube_to_plot = ref_cube.copy( + npx.ma.filled(ref_cube.core_data(), np.nan) + ) + else: + ref_cube_to_plot = ref_cube + plot_ref = plot_func(ref_cube_to_plot, **plot_kwargs) axes_ref.coastlines() if gridline_kwargs is not False: axes_ref.gridlines(**gridline_kwargs) @@ -1217,8 +1235,15 
@@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): plot_kwargs_bias['axes'] = axes_bias if plot_func is iris.plot.contourf: # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 plot_kwargs_bias['transform_first'] = True - plot_bias = plot_func(bias_cube, **plot_kwargs_bias) + npx = da if bias_cube.has_lazy_data() else np + bias_cube_to_plot = bias_cube.copy( + npx.ma.filled(bias_cube.core_data(), np.nan) + ) + else: + bias_cube_to_plot = bias_cube + plot_bias = plot_func(bias_cube_to_plot, **plot_kwargs_bias) axes_bias.coastlines() if gridline_kwargs is not False: axes_bias.gridlines(**gridline_kwargs) @@ -1276,8 +1301,15 @@ def _plot_map_without_ref(self, plot_func, dataset): plot_kwargs['axes'] = axes if plot_func is iris.plot.contourf: # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 plot_kwargs['transform_first'] = True - plot_map = plot_func(cube, **plot_kwargs) + npx = da if cube.has_lazy_data() else np + cube_to_plot = cube.copy( + npx.ma.filled(cube.core_data(), np.nan) + ) + else: + cube_to_plot = cube + plot_map = plot_func(cube_to_plot, **plot_kwargs) axes.coastlines() gridline_kwargs = self._get_gridline_kwargs(plot_type) if gridline_kwargs is not False: diff --git a/esmvaltool/diag_scripts/shared/plot/_plot.py b/esmvaltool/diag_scripts/shared/plot/_plot.py index 66f1e82c08..092479a999 100644 --- a/esmvaltool/diag_scripts/shared/plot/_plot.py +++ b/esmvaltool/diag_scripts/shared/plot/_plot.py @@ -4,6 +4,7 @@ from copy import deepcopy import cartopy.crs as ccrs +import dask.array as da import iris.quickplot import matplotlib.colors as colors import matplotlib.pyplot as plt @@ -228,10 +229,17 @@ def global_contourf(cube, if cbar_range is not None: levels = np.linspace(*cbar_range) kwargs['levels'] = levels - kwargs['transform_first'] = True # see SciTools/cartopy/issues/2457 axes = 
plt.axes(projection=ccrs.Robinson(central_longitude=10)) plt.sca(axes) - map_plot = iris.plot.contourf(cube, **kwargs) + + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + kwargs['transform_first'] = True + npx = da if cube.has_lazy_data() else np + map_plot = iris.plot.contourf( + cube.copy(npx.ma.filled(cube.core_data(), np.nan)), + **kwargs, + ) # Appearance axes.gridlines(color='lightgrey', alpha=0.5) From ab2e6622a715f01995346f5fa9d393577c7cefd3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 16:50:17 +0000 Subject: [PATCH 12/36] [Condalock] Update Linux condalock file (#3798) Co-authored-by: valeriupredoi --- conda-linux-64.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 1b089cf458..7521c7f30c 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -176,7 +176,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2. 
https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda#3d9f3a2e5d7213c34997e4464d2f938c https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-h6565414_0.conda#80eaf80d84668fa5620ac9ec1b4bf56f https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.2-cuda118_h09a87be_0.conda#d59c3f95f80071f24ebce434494ead0a -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_1.conda#21f1e3d43686bc70bd98cc62a431a2cf +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h90cbb55_3.conda#2eeb50cab6652538eee8fc0bc3340c81 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 @@ -294,7 +294,7 @@ https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.c https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.8-py312h66e93f0_1.conda#532c3e5d0280be4fea52396ec1fa7d5d https://conda.anaconda.org/conda-forge/noarch/semver-3.0.2-pyhd8ed1ab_0.conda#5efb3fccda53974aed800b6d575f72ed https://conda.anaconda.org/conda-forge/noarch/setoptconf-tmp-0.3.1-pyhd8ed1ab_0.tar.bz2#af3e36d4effb85b9b9f93cd1db0963df -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.1.0-pyhd8ed1ab_0.conda#d5cd48392c67fb6849ba459c2c2b671f +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda#c8d1a609d5f3358d715c2273011d9f4d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/smmap-5.0.0-pyhd8ed1ab_0.tar.bz2#62f26a3d1387acee31322208f0cfa3e0 @@ -433,7 +433,7 @@ 
https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/python-build-1.2.2.post1-pyhff2d567_0.conda#bd5ae3c630d5eed353badb091fd3e603 -https://conda.anaconda.org/conda-forge/linux-64/suitesparse-7.8.2-hb42a789_0.conda#b7d1ce5a599ec2caf69673f5beff7696 +https://conda.anaconda.org/conda-forge/linux-64/suitesparse-7.8.3-hb42a789_0.conda#216922e19843f5662a2b260f905640cb https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.16-hb9d3cd8_0.conda#7c0a9bf62d573409d12ad14b362a96e5 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 From b3bb4a7e144aab1e92a3abdffdf3fc772be9f38a Mon Sep 17 00:00:00 2001 From: Lukas Date: Wed, 6 Nov 2024 11:34:15 +0100 Subject: [PATCH 13/36] change authors name (#3806) --- esmvaltool/config-references.yml | 10 +++++----- esmvaltool/diag_scripts/monitor/multi_datasets.py | 6 +++++- .../recipes/monitor/recipe_monitor_with_refs.yml | 2 +- esmvaltool/recipes/recipe_shapeselect.yml | 2 +- 4 files changed, 12 insertions(+), 8 deletions(-) diff --git a/esmvaltool/config-references.yml b/esmvaltool/config-references.yml index 199dc671e0..79a85c9866 100644 --- a/esmvaltool/config-references.yml +++ b/esmvaltool/config-references.yml @@ -336,6 +336,11 @@ authors: name: Lillis, Jon institute: MetOffice, UK orcid: + lindenlaub_lukas: + name: Lindenlaub, Lukas + institute: University of Bremen, Germany + orcid: https://orcid.org/0000-0001-6349-9118 + github: lukruh little_bill: name: Little, Bill institute: MetOffice, UK @@ -466,11 +471,6 @@ authors: rol_evert: name: 
Rol, Evert orcid: https://orcid.org/0000-0001-8357-4453 - ruhe_lukas: - name: Ruhe, Lukas - institute: University of Bremen, Germany - orcid: https://orcid.org/0000-0001-6349-9118 - github: lukruh russell_joellen: name: Russell, Joellen institute: Univ. of Arizona, USA diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py index 70faee96c2..41f238a64e 100644 --- a/esmvaltool/diag_scripts/monitor/multi_datasets.py +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -2576,7 +2576,11 @@ def create_hovmoeller_time_vs_lat_or_lon_plot(self, datasets): # Provenance tracking provenance_record = { 'ancestors': ancestors, - 'authors': ['schlund_manuel', 'kraft_jeremy', 'ruhe_lukas'], + 'authors': [ + 'schlund_manuel', + 'kraft_jeremy', + 'lindenlaub_lukas' + ], 'caption': caption, 'plot_types': ['zonal'], 'long_names': [dataset['long_name']], diff --git a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml index 48c5153287..4277313428 100644 --- a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml +++ b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml @@ -10,7 +10,7 @@ documentation: - heuer_helge - kraft_jeremy - kuehbacher_birgit - - ruhe_lukas + - lindenlaub_lukas - sarauer_ellen - winterstein_franziska maintainer: diff --git a/esmvaltool/recipes/recipe_shapeselect.yml b/esmvaltool/recipes/recipe_shapeselect.yml index ee56810f03..b463f09df8 100644 --- a/esmvaltool/recipes/recipe_shapeselect.yml +++ b/esmvaltool/recipes/recipe_shapeselect.yml @@ -11,7 +11,7 @@ documentation: - berg_peter maintainer: - - ruhe_lukas + - lindenlaub_lukas projects: - c3s-magic From 7d8d72c43b2c3cd80fd68d53eee7231ce589f210 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 13:22:45 +0000 Subject: [PATCH 14/36] [Condalock] Update Linux condalock file (#3809) Co-authored-by: 
valeriupredoi --- conda-linux-64.lock | 145 ++++++++++++++++++++++---------------------- 1 file changed, 72 insertions(+), 73 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 7521c7f30c..a3ad9b680c 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -27,9 +27,9 @@ https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.43-h4bf https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda#1b53af320b24547ce0fb8196d2604542 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.2-heb4867d_0.conda#2b780c0338fc0ffa678ac82c54af51fd +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-heb4867d_0.conda#09a6c610d002e54e18353c06ef61a253 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda#59f4c43bb1b5ef1c71946ff2cbf59524 +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 @@ -49,7 +49,7 @@ https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3 https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda#eadcc12bedac44f13223a2909c0e5bcc https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 
https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda#6595440079bed734b113de44ffd3cd0a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda#fcd2016d1d299f654f81021e27496818 https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda#d411fc29e338efb48c5fd4576d71d881 @@ -128,9 +128,11 @@ https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1 https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.1.0-h00ab1b0_0.conda#88928158ccfe797eac29ef5e03f7d23d https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda#73301c133ded2bf71906aa2104edae8b https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.28-pthreads_h3e26593_1.conda#9d5c316d93ee4c5effd9afda8e8af823 https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda#d0ed81c4591775b70384f4cc78e05cd1 https://conda.anaconda.org/conda-forge/linux-64/libunwind-1.6.2-h9c3ff4c_0.tar.bz2#a730b2badd586580c5752cc73842e068 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.1-hf83b1b0_0.conda#e8536ec89df2aec5f65fefcf4ccd58ba +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2#c66fe2d123249af7651ebde8984c51c2 
https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mbedtls-3.5.1-h59595ed_0.conda#a7b444a6e008b804b35521895e3440e2 @@ -165,12 +167,11 @@ https://conda.anaconda.org/conda-forge/linux-64/hdfeos2-2.20-h3e53b52_1004.conda https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda#1091193789bb830127ed067a9e01ac57 https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.8.4-hd24f944_0.conda#94887b4deb460378a34e1533beaacfd5 https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.0-hdb8da77_2.conda#9c4554fafc94db681543804037e65de2 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_0.conda#9ebc9aedafaa2515ab247ff6bb509458 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.28-pthreads_h3e26593_0.conda#2bd7dc48907a3b6bf766ed87867f3459 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-hd5b35b9_1.conda#06def97690ef90781a91b786cb48a0a9 https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda#41c69fba59d495e8cf5ffda48a607e35 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda#3d9f3a2e5d7213c34997e4464d2f938c @@ -179,6 +180,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.2-cuda118_h09a87b 
https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h90cbb55_3.conda#2eeb50cab6652538eee8fc0bc3340c81 +https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.28-pthreads_h3d04fff_1.conda#fdaa89df7b34f5c904f8f1348e5a62a5 https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 https://conda.anaconda.org/conda-forge/linux-64/s2geometry-0.10.0-h8413349_4.conda#d19f88cf8812836e6a4a2a7902ed0e77 https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda#909188c8979846bac8e586908cf1ca6a @@ -191,7 +193,6 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hb9d3cd8_2.cond https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda#ae5f4ad87126c55ba3f690ef07f81d64 https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.3-pyhd8ed1ab_0.conda#ec763b0a58960558ca0ad7255a51a237 https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2#c0481c9de49f040272556e2cedf42816 https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.5-py312h7900ff3_0.conda#e1ed4d572a4a16b97368ab00fd646487 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 @@ -235,7 +236,7 @@ https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda#c7f243bbaea97cd6ea1edd693270100e 
https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 -https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd8ed1ab_6.conda#2ed1fe4b9079da97c44cfe9c2e5078fd +https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd81877a_7.conda#74fbff91ca7c1b9a36b15903f2242f86 https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -246,12 +247,13 @@ https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda#7da1d242ca3591e174a3c7d82230d3c0 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f https://conda.anaconda.org/conda-forge/linux-64/libheif-1.18.2-gpl_hffcb242_100.conda#76ac2c07b62d45c192940f010eea11fa 
-https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_he43201b_1000.conda#36247217c4e1018085bd9db41eb3526a +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.4.0-h2c329e2_0.conda#80030debaa84cfc31755d53742df3ca6 https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda#ed6ead7e9ab9469629c6cfb363b5c6e2 @@ -266,7 +268,6 @@ https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2 https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda#4eccaeba205f0aed9ac3a9ea58568ca3 https://conda.anaconda.org/conda-forge/noarch/natsort-8.4.0-pyhd8ed1ab_0.conda#70959cd1db3cf77b2a27a0836cfd08a7 https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyhd8ed1ab_1.conda#1d4c088869f206413c59acdd309908b7 -https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.28-pthreads_h3d04fff_0.conda#eb2736b14329cf5650917caa43a549c6 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda#1e6c10f7d749a490612404efeb179eb8 https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db @@ -290,7 +291,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda#260009d03c9d5c0f111904d851f053dc https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 
https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda#8f70e36268dea8eb666ef14c29bd3cda -https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py312h12e396e_1.conda#9ae193ac9c1ead5024d5a4ee0024e9a6 +https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.21.0-py312h12e396e_0.conda#37f4ad7cb4214c799f32e5f411c6c69f https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.8-py312h66e93f0_1.conda#532c3e5d0280be4fea52396ec1fa7d5d https://conda.anaconda.org/conda-forge/noarch/semver-3.0.2-pyhd8ed1ab_0.conda#5efb3fccda53974aed800b6d575f72ed https://conda.anaconda.org/conda-forge/noarch/setoptconf-tmp-0.3.1-pyhd8ed1ab_0.tar.bz2#af3e36d4effb85b9b9f93cd1db0963df @@ -318,7 +319,7 @@ https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py312h2ec8cdc_1.con https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-pyhd8ed1ab_1.conda#6042b782b893029aa40335782584a092 https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.0-pyhd8ed1ab_0.conda#f9751d7c71df27b2d29f5cab3378982e https://conda.anaconda.org/conda-forge/noarch/xlsxwriter-3.2.0-pyhd8ed1ab_0.conda#a1f7264726115a2f8eac9773b1f27eba https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.2.1-hb9d3cd8_1.conda#f35a9a2da717ade815ffa70c0e8bdfbd @@ -326,12 +327,13 @@ https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0. 
https://conda.anaconda.org/conda-forge/noarch/yapf-0.32.0-pyhd8ed1ab_0.tar.bz2#177cba0b4bdfacad5c5fbb0ed31504c4 https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda#113506c8d2d558e733f5c38f6bf08c50 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.2-pyhd8ed1ab_0.conda#4daaed111c05672ae669f7036ee5bba3 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda#fee389bf8a4843bd7a2248ce11b7f188 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 +https://conda.anaconda.org/conda-forge/linux-64/arpack-3.9.1-nompi_h77f6705_101.conda#ff39030debb47f6b53b45bada38e0903 https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-hbaf354b_4.conda#2cefeb144de7712995d1b52cc6a3864c https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda#debd1677c2fea41eb2233a260f48a298 -https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_0.conda#6d4e9ecca8d88977147e109fc7053184 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_0.conda#461bcfab8e65c166e297222ae919a2d4 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 @@ -350,6 +352,7 @@ https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda# https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b 
https://conda.anaconda.org/conda-forge/linux-64/git-2.46.0-pl5321hb5640b7_0.conda#825d146359bc8b85083d92259d0a0e1b https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda#623b19f616f2ca0c261441067e18ae40 +https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda#7e1729554e209627636a0f6fabcdd115 @@ -361,13 +364,12 @@ https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda#25 https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda#afcd1b53bcac8844540358e33f33d28f https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda#8dabe607748cb3d7002ad73cd06f1325 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 https://conda.anaconda.org/conda-forge/noarch/logilab-common-1.7.3-py_0.tar.bz2#6eafcdf39a7eb90b6d951cfff59e8d3b https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py312he28fd5a_2.conda#3acf38086326f49afed094df4ba7c9d9 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py312heda63a1_0.conda#d8285bea2a350f63fab23bf460221f3f https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda#69a8838436435f59d72ddcb8dfd24a28 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 @@ -380,16 +382,16 @@ https://conda.anaconda.org/conda-forge/noarch/pydocstyle-6.3.0-pyhd8ed1ab_0.cond https://conda.anaconda.org/conda-forge/noarch/pyproject_hooks-1.2.0-pyh7850678_0.conda#5003da197661e40a2509e9c4651f1eea https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/noarch/python-utils-3.8.2-pyhd8ed1ab_0.conda#89703b4f38bd1c0353881f085bc8fdaa +https://conda.anaconda.org/conda-forge/noarch/python-utils-3.9.0-pyhff2d567_0.conda#ae8d4e318695c0d3e3464ed95cc8b385 https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda#746ce19f0829ec3e19c93007b1a224d3 https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyh0610db2_0.conda#325219de79481bcf5b6446d327e3d492 https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda#0fc8b52192a8898627c3efae1003e9f6 https://conda.anaconda.org/conda-forge/noarch/requirements-detector-1.3.1-pyhd8ed1ab_0.conda#f921ea6a1138cc7edee77de8ed12b226 https://conda.anaconda.org/conda-forge/noarch/retrying-1.3.3-pyhd8ed1ab_3.conda#1f7482562f2082f1b2abf8a3e2a41b63 https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.6-py312h66e93f0_1.conda#28ed869ade5601ee374934a31c9d628e -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 
+https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 -https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.6-pyhd8ed1ab_0.conda#92718e1f892e1e4623dcc59b9f9c4e55 +https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.0-pyhd8ed1ab_0.conda#196a9e6ab4e036ceafa516ea036619b0 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f @@ -399,18 +401,22 @@ https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhca7485f_0.conda#c0 https://conda.anaconda.org/conda-forge/noarch/yamllint-1.35.1-pyhd8ed1ab_0.conda#a1240b99a7ccd953879dc63111823986 https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 -https://conda.anaconda.org/conda-forge/linux-64/arpack-3.9.1-nompi_h77f6705_101.conda#ff39030debb47f6b53b45bada38e0903 https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h6c0439f_6.conda#4e472c316d08af60faeb71f86d7563e1 https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda#36df3cf05459de5d0a41c77c4329634b https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda#ab6d507ad16dbe2157920451d662e4a1 https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_0.conda#ac582de2324988b79870b50c89c91c75 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d 
+https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda#ff28f374b31937c048107521c814791e https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.3-py312hda17c39_0.conda#2abada8c216dd6e32514535a3fa245d4 +https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.1-pyhd8ed1ab_1.conda#5fc43108dee4106f23050acc7a101233 https://conda.anaconda.org/conda-forge/noarch/flake8-polyfill-1.0.2-py_0.tar.bz2#a53db35e3d07f0af2eccd59c2a00bffe https://conda.anaconda.org/conda-forge/noarch/funcargparse-0.2.5-pyhd8ed1ab_0.tar.bz2#e557b70d736251fa0bbb7c4497852a92 https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda#1d6bdc6b2c62c8cc90c67b50142d7b7f https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhd8ed1ab_0.conda#0b2154c1818111e17381b1df5b4b0176 -https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de https://conda.anaconda.org/conda-forge/linux-64/hdfeos5-5.1.16-hf1a501a_15.conda#d2e16a32f41d67c7d280da11b2846328 +https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.6.1-py312h6d9a048_4.conda#a810fadedc4edc06b4282d1222467837 +https://conda.anaconda.org/conda-forge/noarch/imageio-2.36.0-pyh12aca89_1.conda#36349844ff73fcd0140ee7f30745f0bf https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.2.1-hd8ed1ab_0.conda#d6c936d009aa63e5f82d216c95cdcaee https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda#720745920222587ef942acfbc578b584 @@ -421,67 +427,72 @@ https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_ https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda#a908e463c710bd6b10a9eaa89fdf003c 
https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda#392cae2a58fbcb9db8c2147c6d6d1620 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda#4957a903bd6a68cc2e53e47476f9c6f4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py312heda63a1_0.conda#d8285bea2a350f63fab23bf460221f3f +https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 +https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda#e064ca33edf91ac117236c4b5dee207a +https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py312hf9745cd_0.conda#33c27209bfd7af6766211facd24839ce +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py312hfb8ada1_0.conda#d0745ae74c2b26571b692ddde112eebb https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 +https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.6-pyhd8ed1ab_0.conda#a5b55d1cb110cdcedc748b5c3e16e687 https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.5.0-pyhd8ed1ab_0.conda#6f9eb38d0a87898cf5a7c91adaccd691 https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.1-pyhd8ed1ab_0.conda#2a3426f75e2172c932131f4e3d51bcf4 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda#173afeb0d112c854fd1a9fcac4b5cce3 +https://conda.anaconda.org/conda-forge/linux-64/pys2index-0.1.5-py312hfb10629_0.conda#325cc5f0e0dc36562f3de2a4dbded572 https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_0.conda#ecd5e850bcd3eca02143e7df030ee50f https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 
https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 https://conda.anaconda.org/conda-forge/noarch/python-build-1.2.2.post1-pyhff2d567_0.conda#bd5ae3c630d5eed353badb091fd3e603 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py312hc0a28a1_2.conda#8300d634adec4a6aed35a87e90e9cb07 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda#b43233a9e2f62fb94affe5607ea79473 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda#5be02e05e1adaa42826cc6800ce399bc +https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_1.conda#5abeaa41ec50d4d1421a8bc8fbc93054 https://conda.anaconda.org/conda-forge/linux-64/suitesparse-7.8.3-hb42a789_0.conda#216922e19843f5662a2b260f905640cb https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.16-hb9d3cd8_0.conda#7c0a9bf62d573409d12ad14b362a96e5 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda#5dc18b385893b7991a3bbeb135ad7c3e https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda#61f1c193452f0daa582f39634627ea33 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda#ff28f374b31937c048107521c814791e 
-https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.10.0-pyhd8ed1ab_0.conda#7823092a3cf14e98a52d2a2875c47c80 +https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.1-pyhd8ed1ab_0.conda#e88d74bb7b9b89d4c9764286ceb94cc9 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.0-pyhd8ed1ab_0.conda#75c96f0655908f596a57be60251b78d4 https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.38.3-h8bb6dbc_1.conda#73265d4acc551063cc5c5beab37f33c5 -https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.1-pyhd8ed1ab_1.conda#5fc43108dee4106f23050acc7a101233 https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.1-pyhd8ed1ab_0.conda#43f629202f9eec21be5f71171fb5daf8 -https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.6.1-py312h6d9a048_4.conda#a810fadedc4edc06b4282d1222467837 -https://conda.anaconda.org/conda-forge/noarch/imageio-2.36.0-pyh12aca89_1.conda#36349844ff73fcd0140ee7f30745f0bf +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.2-pyhd8ed1ab_0.conda#636950f839e065401e2031624a414f0b +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda#da304c192ad59975202859b367d0f6a2 https://conda.anaconda.org/conda-forge/linux-64/julia-1.10.4-hf18f99d_1.conda#cc0ef9c191bab16211970a29b6787d69 https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_1.conda#ec6f70b8a5242936567d4f886726a372 https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-h353785f_1.conda#c363d0b330b4b21b4c1b10e0981d3a99 https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda#9e7960f0b9ab3895ef73d92477c47dae 
https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 -https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_2.conda#2380c9ba933ffaac9ad16d8eac8e3318 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_106.conda#5b911bfe75855326bae6857451268e59 -https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda#e064ca33edf91ac117236c4b5dee207a -https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py312hf9745cd_0.conda#33c27209bfd7af6766211facd24839ce -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py312hfb8ada1_0.conda#d0745ae74c2b26571b692ddde112eebb -https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.6-pyhd8ed1ab_0.conda#a5b55d1cb110cdcedc748b5c3e16e687 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py312h21d6d8e_102.conda#9049ba34261ce7106220711d313fcf61 https://conda.anaconda.org/conda-forge/noarch/pep8-naming-0.10.0-pyh9f0ad1d_0.tar.bz2#b3c5536e4f9f58a4b16adb6f1e11732d https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_4.conda#028ea131f116f13bb2a4a382b5863a04 https://conda.anaconda.org/conda-forge/noarch/pylint-plugin-utils-0.8.2-pyhd8ed1ab_0.conda#84377261c09c02182d76fbe79e69c9bf https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda#85fa2fdd26d5a38792eb57bc72463f07 -https://conda.anaconda.org/conda-forge/linux-64/pys2index-0.1.5-py312hfb10629_0.conda#325cc5f0e0dc36562f3de2a4dbded572 https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py312hc0a28a1_2.conda#8300d634adec4a6aed35a87e90e9cb07 
https://conda.anaconda.org/conda-forge/linux-64/r-base-4.2.3-h32f4cee_16.conda#feee98a221344be7a447b80b410df867 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda#b43233a9e2f62fb94affe5607ea79473 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda#5be02e05e1adaa42826cc6800ce399bc -https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_1.conda#5abeaa41ec50d4d1421a8bc8fbc93054 +https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda#6b5f4c68483bd0c22bca9094dafc606b +https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.5-pyhd8ed1ab_0.conda#8e1b01f05e8f97b0fcc284f957175903 +https://conda.anaconda.org/conda-forge/noarch/sparse-0.15.4-pyh267e887_1.conda#40d80cd9fa4cc759c6dba19ea96642db +https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.4-py312hc0a28a1_0.conda#97dc960f3d9911964d73c2cf240baea5 https://conda.anaconda.org/conda-forge/linux-64/tempest-remap-2.2.0-h13910d2_3.conda#7f10762cd62c8ad03323c4dc3ee544b1 +https://conda.anaconda.org/conda-forge/noarch/tifffile-2024.9.20-pyhd8ed1ab_0.conda#6de55c7859ed314159eaf2b7b4f19cc7 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 +https://conda.anaconda.org/conda-forge/noarch/xarray-2024.10.0-pyhd8ed1ab_0.conda#53e365732dfa053c4d19fc6b927392c4 +https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_0.conda#41abde21508578e02e3fd492e82a05cd https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda#11d926d1f4a75a1b03d1c053ca20424b -https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.0-pyhd8ed1ab_0.conda#6728ca650187933a007b89f00ece4279 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py312hc0a28a1_6.conda#fa4853d25b6fbfef5eb7b3e1b5616dd5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.10.0-pyhd8ed1ab_0.conda#b3b498f7bcc9a2543ad72a3501f3d87b 
+https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py312hf9745cd_2.conda#cc3ecff140731b46b970a7c4787b1823 +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.11.0-pyhd8ed1ab_0.conda#497f3535cbb69cd2f02158e2e18ee0bb https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_7.conda#9cf27e3f9d97ea13f250db9253a25dc8 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda#524e64f1aa0ebc87230109e684f392f4 https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_7.conda#56a7436a66a1a4636001ce4b621a3a33 https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_7.conda#9c8431dc0b83d5fe9c12a2c0b6861a72 @@ -492,11 +503,12 @@ https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h5e77dd0_7.cond https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda#3392965ffc4e8b7c66a532750ce0e91f https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda#165f12373452e8d17889e9c877431acf https://conda.anaconda.org/conda-forge/linux-64/magics-4.15.4-h24e9adf_1.conda#9731bb0d2a3917cab718fd7c90dea857 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_1.conda#2f4f3854f23be30de29e9e4d39758349 https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 
https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda#0b57b5368ab7fc7cdc9e3511fa867214 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py312h21d6d8e_102.conda#9049ba34261ce7106220711d313fcf61 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c +https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py312h7900ff3_1.conda#f110e71421e5c86e50232cc027c6d85c +https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.2-cuda118_pyh40095f8_0.conda#aa5881b02bd9555a7b06c709aa33bd20 https://conda.anaconda.org/conda-forge/noarch/pylint-celery-0.3-py_1.tar.bz2#e29456a611a62d3f26105a2f9c68f759 https://conda.anaconda.org/conda-forge/noarch/pylint-django-2.6.1-pyhd8ed1ab_0.conda#d1023ccf92d8235cd4808ef53e274a5e https://conda.anaconda.org/conda-forge/noarch/pylint-flask-0.6-py_0.tar.bz2#5a9afd3d0a61b08d59eed70fab859c1b @@ -559,36 +571,28 @@ https://conda.anaconda.org/conda-forge/linux-64/r-xfun-0.45-r42ha18555a_0.conda# https://conda.anaconda.org/conda-forge/noarch/r-xmlparsedata-1.0.5-r42hc72bb7e_2.conda#2f3614450b54f222c1eff786ec2a45ec https://conda.anaconda.org/conda-forge/linux-64/r-yaml-2.3.8-r42h57805ef_0.conda#97f60a93ca12f4fdd5f44049dcee4345 https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 -https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda#6b5f4c68483bd0c22bca9094dafc606b -https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.5-pyhd8ed1ab_0.conda#8e1b01f05e8f97b0fcc284f957175903 -https://conda.anaconda.org/conda-forge/noarch/sparse-0.15.4-pyh267e887_1.conda#40d80cd9fa4cc759c6dba19ea96642db -https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.4-py312hc0a28a1_0.conda#97dc960f3d9911964d73c2cf240baea5 
-https://conda.anaconda.org/conda-forge/noarch/tifffile-2024.9.20-pyhd8ed1ab_0.conda#6de55c7859ed314159eaf2b7b4f19cc7 +https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py312hf9745cd_3.conda#3612f99c589d51c363c8b90c0bcf3a18 +https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.2-pyhd8ed1ab_2.conda#b713b116feaf98acdba93ad4d7f90ca1 https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda#061175d9d4c046a1cf8bffe95a359fab -https://conda.anaconda.org/conda-forge/noarch/xarray-2024.10.0-pyhd8ed1ab_0.conda#53e365732dfa053c4d19fc6b927392c4 -https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_0.conda#41abde21508578e02e3fd492e82a05cd -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py312hf9745cd_2.conda#cc3ecff140731b46b970a7c4787b1823 https://conda.anaconda.org/conda-forge/linux-64/cdo-2.4.1-h9fe33b1_1.conda#a326dab3d2a1a8e32c2a6f792fac3161 -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 https://conda.anaconda.org/conda-forge/noarch/cfgrib-0.9.14.1-pyhd8ed1ab_0.conda#1870fe8c9bd8967429e227be28ab94d2 https://conda.anaconda.org/conda-forge/noarch/chart-studio-1.1.0-pyh9f0ad1d_0.tar.bz2#acd9a12a35e5a0221bdf39eb6e4811dc -https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 https://conda.anaconda.org/conda-forge/linux-64/imagemagick-7.1.1_39-imagemagick_hcfc5581_1.conda#1144fe07cf76921ec664b868453027d3 +https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_2.conda#5d8984ceb5fdf85110ca7108114ecc18 https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda#dc379f362829d5df5ce6722565110029 
https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda#c693e703649051ee9db0fabd4fcd0483 https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda#4015ef020928219acc0b5c9edbce8d30 https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda#c3fac34ecba2fcf9d5d31a03b975d5a1 +https://conda.anaconda.org/conda-forge/noarch/lime-0.2.0.1-pyhd8ed1ab_1.tar.bz2#789ce01416721a5533fb74aa4361fd13 https://conda.anaconda.org/conda-forge/noarch/multiurl-0.3.2-pyhd8ed1ab_0.conda#9b6cf42ef472b332970282ec87d2e5d4 https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/linux-64/nco-5.2.8-hf7c1f58_0.conda#6cd18a9c6b8269b0cd101ba9cc3d02ab https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed https://conda.anaconda.org/conda-forge/noarch/prospector-1.12.1-pyhd8ed1ab_0.conda#8621ba9cf057da26d371b87cd2264259 -https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py312h7900ff3_1.conda#f110e71421e5c86e50232cc027c6d85c -https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.2-cuda118_pyh40095f8_0.conda#aa5881b02bd9555a7b06c709aa33bd20 -https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.1-py312h7900ff3_1.conda#c3d006b1d90fa9f5ae436ff9d6c40249 +https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.5.1-py312h7900ff3_0.conda#683ec8787a523de54b02c885e2c2aefa +https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.2-py312h7900ff3_0.conda#a972ba77217a2cac592c41dd3cc56dfd https://conda.anaconda.org/conda-forge/noarch/pyroma-4.2-pyhd8ed1ab_0.conda#fe2aca9a5d4cb08105aefc451ef96950 https://conda.anaconda.org/conda-forge/linux-64/r-bigmemory-4.6.4-r42ha503ecb_0.conda#12b6fa8fe80a6494a948c6ea2f34340d 
https://conda.anaconda.org/conda-forge/linux-64/r-checkmate-2.3.1-r42h57805ef_0.conda#9febce7369c72d991e2399d7d28f3390 @@ -615,19 +619,18 @@ https://conda.anaconda.org/conda-forge/linux-64/r-timechange-0.3.0-r42ha503ecb_0 https://conda.anaconda.org/conda-forge/linux-64/r-xml2-1.3.6-r42hbfba7a4_1.conda#5c3d7a89a2d5e1c0885f92d1aa6fde30 https://conda.anaconda.org/conda-forge/linux-64/r-zoo-1.8_12-r42h57805ef_1.conda#5367d265c0c9c151dea85f1ccb515ec1 https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b -https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py312hf9745cd_3.conda#3612f99c589d51c363c8b90c0bcf3a18 -https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.2-pyhd8ed1ab_2.conda#b713b116feaf98acdba93ad4d7f90ca1 -https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.5.0-pyhd8ed1ab_0.conda#0ca8f6f735f6171aa178364cdbbebe4d +https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.2-hd8ed1ab_2.conda#a79d8797f62715255308d92d3a91ef2e +https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.2-cuda118_pyh256f914_0.conda#2dcf3e60ef65fd4cb95048f2491f6a89 +https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.5.2-pyhd8ed1ab_0.conda#e7005effa79f1493a51404873d6eb5a0 https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e -https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_2.conda#5d8984ceb5fdf85110ca7108114ecc18 https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda#b654d072b8d5da807495e49b28a0b884 https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda#63779711c7afd4fcf9cea67538baa67a https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda#49c60a8dc089d8127b9368e9eb6c1a77 -https://conda.anaconda.org/conda-forge/noarch/lime-0.2.0.1-pyhd8ed1ab_1.tar.bz2#789ce01416721a5533fb74aa4361fd13 
https://conda.anaconda.org/conda-forge/noarch/mapgenerator-1.0.7-pyhd8ed1ab_0.conda#d18db96ef2a920b0ecefe30282b0aecf https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda#e2d2abb421c13456a9a9f80272fdf543 https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 -https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.5.1-py312h7900ff3_0.conda#683ec8787a523de54b02c885e2c2aefa +https://conda.anaconda.org/conda-forge/linux-64/psy-maps-1.5.0-py312h7900ff3_1.conda#080bc8f34a9cb0ab81ae0369fd43b7ab +https://conda.anaconda.org/conda-forge/linux-64/psy-reg-1.5.0-py312h7900ff3_1.conda#ea719cfcc2e5b815b137b7082ece8aeb https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.8.0-pyhd8ed1ab_0.conda#fba377622e74ee0bbeb8ccae9fa593d3 https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h01725c0_2_cpu.conda#add603bfa43d9bf3f06783f780e1a817 https://conda.anaconda.org/conda-forge/noarch/python-cdo-1.6.0-pyhd8ed1ab_0.conda#3fd1a0b063c1fbbe4b7bd5a5a7601e84 @@ -646,16 +649,12 @@ https://conda.anaconda.org/conda-forge/noarch/r-scales-1.3.0-r42hc72bb7e_0.conda https://conda.anaconda.org/conda-forge/linux-64/r-specsverification-0.5_3-r42h7525677_2.tar.bz2#1521b8a303852af0496245e368d3c61c https://conda.anaconda.org/conda-forge/linux-64/r-splancs-2.01_45-r42hbcb9c34_0.conda#bcd96dc088f54514a54d57e6b8ed51b6 https://conda.anaconda.org/conda-forge/linux-64/r-vctrs-0.6.5-r42ha503ecb_0.conda#5689030c60302fb5bb7a48b54c11dbe8 -https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.2-hd8ed1ab_2.conda#a79d8797f62715255308d92d3a91ef2e https://conda.anaconda.org/conda-forge/noarch/xesmf-0.8.7-pyhd8ed1ab_0.conda#42301f78a4c6d2500f891b9723160d5c -https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.2-cuda118_pyh256f914_0.conda#2dcf3e60ef65fd4cb95048f2491f6a89 https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.7.4-pyhd8ed1ab_0.conda#67a29b663023b8c0e3d8a73013ea3e23 
https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda#4a30f4277a1894928a7057d0e14c1c95 https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda#cd2c36e8865b158b82f61c6aac28b7e1 https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b https://conda.anaconda.org/conda-forge/linux-64/ncl-6.6.2-h7cb714c_54.conda#7363202c15302898deb49e82ca3e5f58 -https://conda.anaconda.org/conda-forge/linux-64/psy-maps-1.5.0-py312h7900ff3_1.conda#080bc8f34a9cb0ab81ae0369fd43b7ab -https://conda.anaconda.org/conda-forge/linux-64/psy-reg-1.5.0-py312h7900ff3_1.conda#ea719cfcc2e5b815b137b7082ece8aeb https://conda.anaconda.org/conda-forge/noarch/r-cyclocomp-1.1.1-r42hc72bb7e_0.conda#6bd41a85dc43541400311eca03d4e2d4 https://conda.anaconda.org/conda-forge/noarch/r-gridextra-2.3-r42hc72bb7e_1005.conda#da116b29105a8d48571975a185e9bb94 https://conda.anaconda.org/conda-forge/noarch/r-lmomco-2.5.1-r42hc72bb7e_0.conda#6efbdfe5d41b3ef5652be1ea2e0a6e3c @@ -672,11 +671,11 @@ https://conda.anaconda.org/conda-forge/linux-64/r-tibble-3.2.1-r42h57805ef_2.con https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_2.conda#5f7d505626cb057e1320bbd46dd02ef2 https://conda.anaconda.org/conda-forge/noarch/r-ggplot2-3.5.1-r42hc72bb7e_0.conda#77cc0254e0dc92e5e7791ce20a170f74 https://conda.anaconda.org/conda-forge/noarch/r-rematch2-2.1.2-r42hc72bb7e_3.conda#5ccfee6f3b94e6b247c7e1929b24f1cc -https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.16-pyhd8ed1ab_0.conda#81de1c44ab7f6cadab4a59b6d76dfa87 +https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.17-pyhd8ed1ab_0.conda#4f75a3a76e9f693fc33be59485f46fcf https://conda.anaconda.org/conda-forge/noarch/r-styler-1.10.3-r42hc72bb7e_0.conda#1b2b8fa85a9d0556773abac4763d8ef9 https://conda.anaconda.org/conda-forge/linux-64/r-tlmoments-0.7.5.3-r42ha503ecb_1.conda#6aa1414e06dfffc39d3b5ca78b60b377 
https://conda.anaconda.org/conda-forge/noarch/r-viridis-0.6.5-r42hc72bb7e_0.conda#959f69b6dfd4b620a15489975fa27670 -https://conda.anaconda.org/conda-forge/noarch/dask-2024.10.0-pyhd8ed1ab_0.conda#719832923b1d98803d07b2ca38eb3baa +https://conda.anaconda.org/conda-forge/noarch/dask-2024.11.0-pyhd8ed1ab_0.conda#9a25bf7e2a910e85209218896f2adeb9 https://conda.anaconda.org/conda-forge/linux-64/r-fields-15.2-r42h61816a4_0.conda#d84fe2f9e893e92089370b195e2263a0 https://conda.anaconda.org/conda-forge/noarch/r-spei-1.8.1-r42hc72bb7e_1.conda#7fe060235dac0fc0b3d387f98e79d128 https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_1.conda#86286b197e33e3b034416c18ba0f574c From eb627592325e91fc5021bb38a86f7905de88e2d5 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Wed, 13 Nov 2024 13:16:39 +0100 Subject: [PATCH 15/36] Remove recipe filler utility (#3777) --- doc/sphinx/source/utils.rst | 57 -- esmvaltool/utils/recipe_filler.py | 914 ------------------------ setup.py | 2 - tests/integration/test_recipe_filler.py | 211 ------ 4 files changed, 1184 deletions(-) delete mode 100755 esmvaltool/utils/recipe_filler.py delete mode 100644 tests/integration/test_recipe_filler.py diff --git a/doc/sphinx/source/utils.rst b/doc/sphinx/source/utils.rst index 536b78ebee..d0783ff2a4 100644 --- a/doc/sphinx/source/utils.rst +++ b/doc/sphinx/source/utils.rst @@ -383,63 +383,6 @@ klaus.zimmermann@smhi.se .. _pygithub: https://pygithub.readthedocs.io/en/latest/introduction.html -Recipe filler -============= - -If you need to fill in a blank recipe with additional datasets, you can do that with -the command `recipe_filler`. This runs a tool to obtain a set of additional datasets when -given a blank recipe, and you can give an arbitrary number of data parameters. The blank recipe -should contain, to the very least, a list of diagnostics, each with their variable(s). -Example of running the tool: - -.. 
code-block:: bash - - recipe_filler recipe.yml - -where `recipe.yml` is the recipe that needs to be filled with additional datasets; a minimal -example of this recipe could be: - -.. code-block:: yaml - - diagnostics: - diagnostic: - variables: - ta: - mip: Amon # required - start_year: 1850 # required - end_year: 1900 # required - - -Key features ------------- - -- you can add as many variable parameters as are needed; if not added, the - tool will use the ``"*"`` wildcard and find all available combinations; -- you can restrict the number of datasets to be looked for with the ``dataset:`` - key for each variable, pass a list of datasets as value, e.g. - ``dataset: [MPI-ESM1-2-LR, MPI-ESM-LR]``; -- you can specify a pair of experiments, e.g. ``exp: [historical, rcp85]`` - for each variable; this will look for each available dataset per experiment - and assemble an aggregated data stretch from each experiment to complete - for the total data length specified by ``start_year`` and ``end_year``; equivalent to - ESMValTool's syntax on multiple experiments; this option needs an ensemble - to be declared explicitly; it will return no entry if there are gaps in data; -- ``start_year`` and ``end_year`` are required and are used to filter out the - datasets that don't have data in the interval; as noted above, the tool will not - return datasets with partial coverage from ``start_year`` to ``end_year``; - if you want all possible years hence no filtering on years just use ``"*"`` - for start and end years; -- ``config-user: rootpath: CMIPX`` may be a list, rootpath lists are supported; -- all major DRS paths (including ``default``, ``BADC``, ``ETHZ`` etc) are supported; -- speedup is achieved through CMIP mip tables lookup, so ``mip`` is required in recipe; - -Caveats -------- - -- the tool doesn't yet work with derived variables; it will not return any available datasets; -- operation restricted to CMIP data only, OBS lookup is not available yet. 
- - Extracting a list of input files from the provenance ==================================================== diff --git a/esmvaltool/utils/recipe_filler.py b/esmvaltool/utils/recipe_filler.py deleted file mode 100755 index 40f637c6d5..0000000000 --- a/esmvaltool/utils/recipe_filler.py +++ /dev/null @@ -1,914 +0,0 @@ -""" -Fill in a blank recipe with additional datasets. - -Tool to obtain a set of additional datasets when given a blank recipe. -The blank recipe should contain, to the very least, a list of diagnostics -each with their variable(s). Example of minimum settings: - -diagnostics: - diagnostic: - variables: - ta: - mip: Amon - start_year: 1850 - end_year: 1900 - -Note that the tool will exit if any of these minimum settings are missing! - -Key features: - -- you can add as many variable parameters as are needed; if not added, the - tool will use the "*" wildcard and find all available combinations; -- you can restrict the number of datasets to be looked for with the `dataset:` - key for each variable, pass a list of datasets as value, e.g. - `dataset: [MPI-ESM1-2-LR, MPI-ESM-LR]`; -- you can specify a pair of experiments eg `exp: [rcp26, rcp85]` - for each variable; this will look for each available dataset per experiment - and assemble an aggregated data stretch from each experiment; equivalent to - esmvaltool's syntax of multiple experiments; this option needs an ensemble - to be declared explicitly; it will return no entry if there are gaps in data -- `start_year` and `end_year` are mandatory and are used to filter out the - datasets that don't have data in the interval; if you want all possible years - hence no filtering on years just use "*" for start and end years; -- `config-user: rootpath: CMIPX` may be a list, rootpath lists are supported; - -Caveats: - -- the tool doesn't yet work for derived variables; -- operation restricted to CMIP data. - -Have fun! 
-""" -import argparse -import datetime -import itertools -import logging -import logging.config -import os -import shutil -import time -from glob import glob -from pathlib import Path - -import esmvalcore -import yaml - -from esmvalcore import __version__ as core_ver -from esmvalcore.cmor.table import CMOR_TABLES, read_cmor_tables -from packaging import version as pkg_version -from ruamel.yaml import YAML - -logger = logging.getLogger(__name__) - -CFG = {} - - -def _purge_file_handlers(cfg: dict) -> None: - """Remove handlers with filename set. - - This is used to remove file handlers which require an output - directory to be set. - """ - cfg['handlers'] = { - name: handler - for name, handler in cfg['handlers'].items() - if 'filename' not in handler - } - prev_root = cfg['root']['handlers'] - cfg['root']['handlers'] = [ - name for name in prev_root if name in cfg['handlers'] - ] - - -def _update_stream_level(cfg: dict, level=None): - """Update the log level for the stream handlers.""" - handlers = cfg['handlers'] - - for handler in handlers.values(): - if level is not None and 'stream' in handler: - if handler['stream'] in ('ext://sys.stdout', 'ext://sys.stderr'): - handler['level'] = level.upper() - - -def _get_log_files(cfg: dict, output_dir: str = None) -> list: - """Initialize log files for the file handlers.""" - log_files = [] - - handlers = cfg['handlers'] - - for handler in handlers.values(): - filename = handler.get('filename', None) - - if filename: - if not os.path.isabs(filename): - handler['filename'] = os.path.join(output_dir, filename) - log_files.append(handler['filename']) - - return log_files - - -def configure_logging(cfg_file: str = None, - output_dir: str = None, - console_log_level: str = None) -> list: - """Configure logging. - - Parameters - ---------- - cfg_file : str, optional - Logging config file. If `None`, defaults to `configure-logging.yml` - output_dir : str, optional - Output directory for the log files. 
If `None`, log only to the console. - console_log_level : str, optional - If `None`, use the default (INFO). - - Returns - ------- - log_files : list - Filenames that will be logged to. - """ - if cfg_file is None: - cfg_loc = Path(esmvalcore.__file__ + "esmvalcore") - if pkg_version.parse(core_ver) < pkg_version.parse('2.8.0'): - cfg_file = cfg_loc.parents[0] / '_config' / 'config-logging.yml' - else: - cfg_file = cfg_loc.parents[0] / 'config' / 'config-logging.yml' - - cfg_file = Path(cfg_file).absolute() - - with open(cfg_file) as file_handler: - cfg = yaml.safe_load(file_handler) - - if output_dir is None: - _purge_file_handlers(cfg) - - log_files = _get_log_files(cfg, output_dir=output_dir) - _update_stream_level(cfg, level=console_log_level) - - logging.config.dictConfig(cfg) - logging.Formatter.converter = time.gmtime - logging.captureWarnings(True) - - return log_files - - -def read_config_developer_file(cfg_file=None): - """Read the developer's configuration file.""" - if cfg_file is None: - cfg_loc = Path(esmvalcore.__file__ + "esmvalcore") - cfg_file = cfg_loc.parents[0] / 'config-developer.yml' - - with open(cfg_file, 'r') as file: - cfg = yaml.safe_load(file) - - return cfg - - -def _normalize_path(path): - """Normalize paths. - - Expand ~ character and environment variables and convert path to absolute. 
- - Parameters - ---------- - path: str - Original path - - Returns - ------- - str: - Normalized path - """ - if path is None: - return None - return os.path.abspath(os.path.expanduser(os.path.expandvars(path))) - - -def read_config_user_file(config_file, folder_name, options=None): - """Read config user file and store settings in a dictionary.""" - if not config_file: - config_file = '~/.esmvaltool/config-user.yml' - config_file = os.path.abspath( - os.path.expandvars(os.path.expanduser(config_file))) - # Read user config file - if not os.path.exists(config_file): - print(f"ERROR: Config file {config_file} does not exist") - - with open(config_file, 'r') as file: - cfg = yaml.safe_load(file) - - if options is None: - options = dict() - for key, value in options.items(): - cfg[key] = value - - # set defaults - defaults = { - 'compress_netcdf': False, - 'exit_on_warning': False, - 'output_file_type': 'png', - 'output_dir': 'esmvaltool_output', - 'auxiliary_data_dir': 'auxiliary_data', - 'save_intermediary_cubes': False, - 'remove_preproc_dir': True, - 'max_parallel_tasks': None, - 'run_diagnostic': True, - 'profile_diagnostic': False, - 'config_developer_file': None, - 'drs': {}, - } - - for key in defaults: - if key not in cfg: - logger.info( - "No %s specification in config file, " - "defaulting to %s", key, defaults[key]) - cfg[key] = defaults[key] - - cfg['output_dir'] = _normalize_path(cfg['output_dir']) - cfg['auxiliary_data_dir'] = _normalize_path(cfg['auxiliary_data_dir']) - - cfg['config_developer_file'] = _normalize_path( - cfg['config_developer_file']) - - for key in cfg['rootpath']: - root = cfg['rootpath'][key] - if isinstance(root, str): - cfg['rootpath'][key] = [_normalize_path(root)] - else: - cfg['rootpath'][key] = [_normalize_path(path) for path in root] - - # insert a directory date_time_recipe_usertag in the output paths - now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") - new_subdir = '_'.join((folder_name, now)) - cfg['output_dir'] = 
os.path.join(cfg['output_dir'], new_subdir) - - # create subdirectories - cfg['preproc_dir'] = os.path.join(cfg['output_dir'], 'preproc') - cfg['work_dir'] = os.path.join(cfg['output_dir'], 'work') - cfg['plot_dir'] = os.path.join(cfg['output_dir'], 'plots') - cfg['run_dir'] = os.path.join(cfg['output_dir'], 'run') - - # Read developer configuration file - read_cmor_tables(cfg['config_developer_file']) - - return cfg - - -HEADER = r""" -______________________________________________________________________ - _____ ____ __ ____ __ _ _____ _ - | ____/ ___|| \/ \ \ / /_ _| |_ _|__ ___ | | - | _| \___ \| |\/| |\ \ / / _` | | | |/ _ \ / _ \| | - | |___ ___) | | | | \ V / (_| | | | | (_) | (_) | | - |_____|____/|_| |_| \_/ \__,_|_| |_|\___/ \___/|_| -______________________________________________________________________ - -""" + __doc__ - -dataset_order = [ - 'dataset', 'project', 'exp', 'mip', 'ensemble', 'grid', 'start_year', - 'end_year' -] - -# cmip eras -cmip_eras = ["CMIP5", "CMIP6"] - -# The base dictionairy (all wildcards): -base_dict = { - 'institute': '*', - 'dataset': '*', - 'project': '*', - 'exp': '*', - 'frequency': '*', - 'ensemble': '*', - 'mip': '*', - 'modeling_realm': '*', - 'short_name': '*', - 'grid': '*', - 'start_year': '*', - 'end_year': '*', - 'activity': '*', -} - - -def _get_download_dir(yamlconf, cmip_era): - """Get the Download Directory from user config file.""" - if 'download_dir' in yamlconf: - return os.path.join(yamlconf['download_dir'], cmip_era) - return False - - -def _get_site_rootpath(cmip_era): - """Get site (drs) from config-user.yml.""" - config_yml = get_args().config_file - with open(config_yml, 'r') as yamf: - yamlconf = yaml.safe_load(yamf) - drs = yamlconf['drs'][cmip_era] - - download_dir = _get_download_dir(yamlconf, cmip_era) - rootdir = [yamlconf['rootpath'][cmip_era], ] - - if download_dir: - rootdir.append(download_dir) - logger.debug("%s root directory %s", cmip_era, rootdir) - if drs == 'default' and 'default' in 
yamlconf['rootpath']: - rootdir = [yamlconf['rootpath']['default'], ] - if download_dir: - rootdir.append(download_dir) - - logger.debug("Using drs default and " - "default: %s data directory", rootdir) - - return drs, rootdir - - -def _get_input_dir(cmip_era): - """Get input_dir from config-developer.yml.""" - site = _get_site_rootpath(cmip_era)[0] - yamlconf = read_config_developer_file() - - return yamlconf[cmip_era]['input_dir'][site] - - -def _get_input_file(cmip_era): - """Get input_file from config-developer.yml.""" - yamlconf = read_config_developer_file() - return yamlconf[cmip_era]['input_file'] - - -def _determine_basepath(cmip_era): - """Determine a basepath.""" - if isinstance(_get_site_rootpath(cmip_era)[1], list): - rootpaths = _get_site_rootpath(cmip_era)[1] - else: - rootpaths = [_get_site_rootpath(cmip_era)[1]] - - basepaths = [] - for rootpath in rootpaths: - if _get_input_dir(cmip_era) != os.path.sep: - basepath = os.path.join(rootpath, _get_input_dir(cmip_era), - _get_input_file(cmip_era)) - else: - basepath = os.path.join(rootpath, _get_input_file(cmip_era)) - basepath = basepath.replace('//', '/') - basepaths.append(basepath) - logger.debug("We will look for files of patterns %s", basepaths) - - return basepaths - - -def _overlapping_datasets(files, all_years, start_year, end_year): - """Process overlapping datasets and check for avail data in time range.""" - valid_files = [] - ay_sorted = sorted(all_years) - if ay_sorted[0] <= start_year and ay_sorted[-1] >= end_year: - yr_pairs = sorted( - [all_years[i:i + 2] for i in range(0, len(all_years), 2)]) - yr_pairs = list(k for k, _ in itertools.groupby(yr_pairs)) - d_y = [ - yr_pairs[j][1] - yr_pairs[j + 1][0] - for j in range(len(yr_pairs) - 1) - ] - gaps = [c for c in d_y if c < -1] - if not gaps: - valid_files = files - logger.info("Contiguous data from multiple experiments.") - else: - logger.warning("Data from multiple exps has >1 year gaps! 
") - logger.debug("Start %s/end %s requested - " - "files covering %s found.", - start_year, end_year, yr_pairs) - - return valid_files - - -def filter_years(files, start_year, end_year, overlap=False): - """ - Filter out files that are outside requested time range. - - Nifty function that takes a list of files and two years - as arguments; it will build a series of filter dictionaries - and check if data is available for the entire interval; - it will return a single file per dataset, the first file - in the list of files that cover the specified interval; - optional argument `overlap` used if multiple experiments are - used and overlap between datasets is present. - - Parameters - ---------- - files: list - A list of files that need filtering by requested time range. - - start_year: int - Integer start year of requested range. - - end_year: int - Integer end year of requested range. - - overlap: bool - Flag if datasets overlap; defaults to False. - - Returns - ------- - list - List of files which have been identified as falling in - the requested time range; if multiple files within time range - per dataset, the first file will be returned. 
- - """ - valid_files = [] - available_years = {} - - if start_year == "*" and end_year == "*": - return files - - if not files: - return valid_files - - all_files_roots = [("").join(fil.split("_")[0:-1]) for fil in files] - for fil in files: - available_years[("").join(fil.split("_")[0:-1])] = [] - for fil in files: - available_years[("").join(fil.split("_")[0:-1])].append( - fil.split("_")[-1].strip(".nc").split("-")) - - all_years = [] - for root, yr_list in available_years.items(): - actual_years = [] - yr_list = list(itertools.chain.from_iterable(yr_list)) - for year in yr_list: - if len(year) == 4: - actual_years.append(int(year)) - else: - actual_years.append(int(year[0:4])) - actual_years = sorted(actual_years) - all_years.extend(actual_years) - if not overlap: - actual_years = sorted(list(set(actual_years))) - if actual_years[0] <= start_year and actual_years[-1] >= end_year: - idx = all_files_roots.index(root) - valid_files.append(files[idx]) - - # multiple experiments to complete each other - if overlap: - valid_files = _overlapping_datasets(files, all_years, start_year, - end_year) - - if not valid_files: - logger.warning("No data found to fully cover start " - "%s / end %s as requested!", start_year, end_year) - - return valid_files - - -def _resolve_latestversion(dirname_template): - """Resolve the 'latestversion' tag.""" - for version_separator in ['{latestversion}', '{version}']: - if version_separator in dirname_template: - break - else: - return dirname_template - - # Find latest version - part1, part2 = dirname_template.split(version_separator) - part2 = part2.lstrip(os.sep) - part1_contents = glob(part1) - if part1_contents: - versions = os.listdir(part1_contents[0]) - versions.sort(reverse=True) - for version in ['latest'] + versions: - dirname = os.path.join(part1, version, part2) - if glob(dirname): - return dirname - - return dirname_template - - -def list_all_files(file_dict, cmip_era): - """ - List all files that match the dataset 
dictionary. - - Function that returns all files that are determined by a - file_dict dictionary; file_dict is keyed on usual parameters - like `dataset`, `project`, `mip` etc; glob.glob is used - to find files; speedup is achieved by replacing wildcards - with values from CMOR tables. - - Parameters - ---------- - file_dict: dict - Dictionary to hold dataset specifications. - - cmip_era: str - Either CMIP5 or CMIP6. - - Returns - ------- - list: - List of found files. - - """ - mip = file_dict['mip'] - short_name = file_dict['short_name'] - try: - frequency = CMOR_TABLES[cmip_era].get_variable(mip, - short_name).frequency - realms = CMOR_TABLES[cmip_era].get_variable(mip, - short_name).modeling_realm - except AttributeError: - logger.warning("Could not find %s CMOR table " - "for variable %s with mip %s", - cmip_era, short_name, mip) - return [] - file_dict['frequency'] = frequency - - basepaths = _determine_basepath(cmip_era) - all_files = [] - - for basepath in basepaths: - new_path = basepath[:] - - # could have multiple realms - for realm in realms: - file_dict['modeling_realm'] = realm - - # load all the files in the custom dict - for key, value in file_dict.items(): - new_path = new_path.replace('{' + key + '}', str(value)) - new_path = _resolve_latestversion(new_path) - if new_path.startswith("~"): - new_path = os.path.expanduser(new_path) - if not new_path.startswith(os.sep): - raise ValueError( - "Could not expand ~ to user home dir " - "please expand it in the config user file!") - logger.info("Expanding path to %s", new_path) - - # Globs all the wildcards into a list of files. - files = glob(new_path) - all_files.extend(files) - if not all_files: - logger.warning("Could not find any file for data specifications.") - - return all_files - - -def _file_to_recipe_dataset(fn_path, cmip_era, file_dict): - """Convert a filename to an recipe ready dataset.""" - # Add the obvious ones - ie the one you requested! 
- output_dataset = {} - output_dataset['project'] = cmip_era - for key, value in file_dict.items(): - if value == '*': - continue - if key in dataset_order: - output_dataset[key] = value - - # Split file name and base path into directory structure and filenames. - basefiles = _determine_basepath(cmip_era) - _, fnfile = os.path.split(fn_path) - - for basefile in basefiles: - _, basefile = os.path.split(basefile) - # Some of the key words include the splitting character '_' ! - basefile = basefile.replace('short_name', 'shortname') - basefile = basefile.replace('start_year', 'startyear') - basefile = basefile.replace('end_year', 'endyear') - - # Assume filename is separated by '_' - basefile_split = [key.replace("{", "") for key in basefile.split('_')] - basefile_split = [key.replace("}", "") for key in basefile_split] - fnfile_split = fnfile.split('_') - - # iterate through directory structure looking for useful bits. - for base_key, fn_key in zip(basefile_split, fnfile_split): - if base_key == '*.nc': - fn_key = fn_key.replace('.nc', '') - start_year, end_year = fn_key.split('-') - output_dataset['start_year'] = start_year - output_dataset['end_year'] = end_year - elif base_key == "ensemble*.nc": - output_dataset['ensemble'] = fn_key - elif base_key == "grid*.nc": - output_dataset['grid'] = fn_key - elif base_key == "shortname": - pass - else: - output_dataset[base_key] = fn_key - if "exp" in file_dict: - if isinstance(file_dict["exp"], list): - output_dataset["exp"] = file_dict["exp"] - - return output_dataset - - -def _remove_duplicates(add_datasets): - """ - Remove accidental duplicates. - - Close to 0% chances this will ever be used. - May be used when there are actual duplicates in data - storage, we've seen these before, but seldom. 
- """ - datasets = [] - seen = set() - - for dataset in add_datasets: - orig_exp = dataset["exp"] - dataset["exp"] = str(dataset["exp"]) - tup_dat = tuple(dataset.items()) - if tup_dat not in seen: - seen.add(tup_dat) - dataset["exp"] = orig_exp - datasets.append(dataset) - - return datasets - - -def _check_recipe(recipe_dict): - """Perform a quick recipe check for mandatory fields.""" - do_exit = False - if "diagnostics" not in recipe_dict: - logger.error("Recipe missing diagnostics section.") - do_exit = True - for diag_name, diag in recipe_dict["diagnostics"].items(): - if "variables" not in diag: - logger.error("Diagnostic %s missing variables.", diag_name) - do_exit = True - for var_name, var_pars in diag["variables"].items(): - if "mip" not in var_pars: - logger.error("Variable %s missing mip.", var_name) - do_exit = True - if "start_year" not in var_pars: - logger.error("Variable %s missing start_year.", var_name) - do_exit = True - if "end_year" not in var_pars: - logger.error("Variable %s missing end_year.", var_name) - do_exit = True - if "exp" in var_pars: - if isinstance(var_pars["exp"], - list) and "ensemble" not in var_pars: - logger.error("Asking for experiments list for ") - logger.error("variable %s - you need to ", var_name) - logger.error("define an ensemble for this case.") - do_exit = True - if do_exit: - raise ValueError("Please fix the issues in recipe and rerun") - - -def _check_config_file(user_config_file): - """Perform a quick recipe check for mandatory fields.""" - do_exit = False - if "rootpath" not in user_config_file: - logger.error("Config file missing rootpath section.") - do_exit = True - if "drs" not in user_config_file: - logger.error("Config file missing drs section.") - do_exit = True - for proj in cmip_eras: - if proj not in user_config_file["rootpath"].keys(): - logger.error("Config file missing rootpath for %s", proj) - do_exit = True - if proj not in user_config_file["drs"].keys(): - logger.error("Config file missing drs 
for %s", proj) - do_exit = True - if do_exit: - raise ValueError("Please fix issues in config file and rerun") - - -def _parse_recipe_to_dicts(yamlrecipe): - """Parse a recipe's variables into a dictionary of dictionairies.""" - output_dicts = {} - for diag in yamlrecipe['diagnostics']: - for variable, var_dict in yamlrecipe['diagnostics'][diag][ - 'variables'].items(): - new_dict = base_dict.copy() - for var_key, var_value in var_dict.items(): - if var_key in new_dict: - new_dict[var_key] = var_value - output_dicts[(diag, variable)] = new_dict - - return output_dicts - - -def _add_datasets_into_recipe(additional_datasets, output_recipe): - """Add the datasets into a new recipe.""" - yaml = YAML() - yaml.default_flow_style = False - with open(output_recipe, 'r') as yamlfile: - cur_yaml = yaml.load(yamlfile) - for diag_var, add_dat in additional_datasets.items(): - if add_dat: - if 'additional_datasets' in cur_yaml['diagnostics']: - cur_yaml['diagnostics'][diag_var[0]]['variables'][ - diag_var[1]]['additional_datasets'].extend(add_dat) - else: - cur_yaml['diagnostics'][diag_var[0]]['variables'][ - diag_var[1]]['additional_datasets'] = add_dat - if cur_yaml: - with open(output_recipe, 'w') as yamlfile: - yaml.dump(cur_yaml, yamlfile) - - -def _find_all_datasets(recipe_dict, cmip_eras): - """Find all datasets explicitly.""" - datasets = [] - for cmip_era in cmip_eras: - if cmip_era == "CMIP6": - activity = "CMIP" - else: - activity = "" - drs, site_path = _get_site_rootpath(cmip_era) - if drs in ["default", "SMHI"]: - logger.info("DRS is %s; filter on dataset disabled.", drs) - datasets = ["*"] - else: - if not isinstance(site_path, list): - site_path = [site_path] - for site_pth in site_path: - if drs in ["BADC", "DKRZ", "CP4CDS"]: - institutes_path = os.path.join(site_pth, activity) - elif drs in ["ETHZ", "RCAST"]: - exp = recipe_dict["exp"][0] - if exp == "*": - exp = "piControl" # all institutes have piControl - mip = recipe_dict["mip"] - var = 
recipe_dict["short_name"] - institutes_path = os.path.join(site_pth, exp, mip, var) - - if not os.path.isdir(institutes_path): - logger.warning("Path to data %s " - "does not exist; will look everywhere.", - institutes_path) - datasets = ["*"] - return datasets - - institutes = os.listdir(institutes_path) - if drs in ["BADC", "DKRZ", "CP4CDS"]: - for institute in institutes: - datasets.extend( - os.listdir(os.path.join(institutes_path, - institute))) - else: - datasets.extend(institutes) - - return datasets - - -def _get_exp(recipe_dict): - """Get the correct exp as list of single or multiple exps.""" - if isinstance(recipe_dict["exp"], list): - exps_list = recipe_dict["exp"] - logger.info("Multiple %s experiments requested", exps_list) - else: - exps_list = [recipe_dict["exp"]] - logger.info("Single %s experiment requested", exps_list) - - return exps_list - - -def _get_datasets(recipe_dict, cmip_eras): - """Get the correct datasets as list if needed.""" - if recipe_dict["dataset"] == "*": - datasets = _find_all_datasets(recipe_dict, cmip_eras) - return datasets - if isinstance(recipe_dict['dataset'], list): - datasets = recipe_dict['dataset'] - logger.info("Multiple %s datasets requested", datasets) - else: - datasets = [recipe_dict['dataset']] - logger.info("Single %s dataset requested", datasets) - - return datasets - - -def get_args(): - """Parse command line arguments.""" - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('recipe', help='Path/name of yaml pilot recipe file') - parser.add_argument('-c', - '--config-file', - default=os.path.join(os.environ["HOME"], '.esmvaltool', - 'config-user.yml'), - help='User configuration file') - - parser.add_argument('-o', - '--output', - default=os.path.join(os.getcwd(), - 'recipe_autofilled.yml'), - help='Output recipe, default recipe_autofilled.yml') - - args = parser.parse_args() - return args - - -def 
_get_timefiltered_files(recipe_dict, exps_list, cmip_era): - """Obtain all files that correspond to requested time range.""" - # multiple experiments allowed, complement data from each exp - if len(exps_list) > 1: - files = [] - for exp in exps_list: - recipe_dict["exp"] = exp - files.extend(list_all_files(recipe_dict, cmip_era)) - files = filter_years(files, - recipe_dict["start_year"], - recipe_dict["end_year"], - overlap=True) - recipe_dict["exp"] = exps_list - - else: - files = list_all_files(recipe_dict, cmip_era) - files = filter_years(files, recipe_dict["start_year"], - recipe_dict["end_year"]) - - return files - - -def run(): - """Run the `recipe_filler` tool. Help in __doc__ and via --help.""" - # Get arguments - args = get_args() - input_recipe = args.recipe - output_recipe = args.output - cmip_eras = ["CMIP5", "CMIP6"] - - # read the config file - config_user = read_config_user_file(args.config_file, - 'recipe_filler', - options={}) - - # configure logger - run_dir = os.path.join(config_user['output_dir'], 'recipe_filler') - if not os.path.isdir(run_dir): - os.makedirs(run_dir) - log_files = configure_logging(output_dir=run_dir, - console_log_level=config_user['log_level']) - logger.info(HEADER) - logger.info("Using user configuration file: %s", args.config_file) - logger.info("Using pilot recipe file: %s", input_recipe) - logger.info("Writing filled out recipe to: %s", output_recipe) - log_files = "\n".join(log_files) - logger.info("Writing program log files to:\n%s", log_files) - - # check config user file - _check_config_file(config_user) - - # parse recipe - with open(input_recipe, 'r') as yamlfile: - yamlrecipe = yaml.safe_load(yamlfile) - _check_recipe(yamlrecipe) - recipe_dicts = _parse_recipe_to_dicts(yamlrecipe) - - # Create a list of additional_datasets for each diagnostic/variable. 
- additional_datasets = {} - for (diag, variable), recipe_dict in recipe_dicts.items(): - logger.info("Looking for data for " - "variable %s in diagnostic %s", variable, diag) - new_datasets = [] - if "short_name" not in recipe_dict: - recipe_dict['short_name'] = variable - elif recipe_dict['short_name'] == "*": - recipe_dict['short_name'] = variable - - # adjust cmip era if needed - if recipe_dict['project'] != "*": - cmip_eras = [recipe_dict['project']] - - # get datasets depending on user request; always a list - datasets = _get_datasets(recipe_dict, cmip_eras) - - # get experiments depending on user request; always a list - exps_list = _get_exp(recipe_dict) - - # loop through datasets - for dataset in datasets: - recipe_dict['dataset'] = dataset - logger.info("Seeking data for dataset: %s", dataset) - for cmip_era in cmip_eras: - files = _get_timefiltered_files(recipe_dict, exps_list, - cmip_era) - - # assemble in new recipe - add_datasets = [] - for fn in sorted(files): - fn_dir = os.path.dirname(fn) - logger.info("Data directory: %s", fn_dir) - out = _file_to_recipe_dataset(fn, cmip_era, recipe_dict) - logger.info("New recipe entry: %s", out) - if out is None: - continue - add_datasets.append(out) - new_datasets.extend(add_datasets) - additional_datasets[(diag, variable, cmip_era)] = \ - _remove_duplicates(new_datasets) - - # add datasets to recipe as additional_datasets - shutil.copyfile(input_recipe, output_recipe, follow_symlinks=True) - _add_datasets_into_recipe(additional_datasets, output_recipe) - logger.info("Finished recipe filler. 
Go get some science done now!") - - -if __name__ == "__main__": - run() diff --git a/setup.py b/setup.py index 6b4636d1f7..86aab79854 100755 --- a/setup.py +++ b/setup.py @@ -250,8 +250,6 @@ def read_description(filename): 'nclcodestyle = esmvaltool.utils.nclcodestyle.nclcodestyle:_main', 'test_recipe = ' 'esmvaltool.utils.testing.recipe_settings.install_expand_run:main', - 'recipe_filler = ' - 'esmvaltool.utils.recipe_filler:run' ], 'esmvaltool_commands': [ 'colortables = ' diff --git a/tests/integration/test_recipe_filler.py b/tests/integration/test_recipe_filler.py deleted file mode 100644 index b78ac8c5f8..0000000000 --- a/tests/integration/test_recipe_filler.py +++ /dev/null @@ -1,211 +0,0 @@ -"""Tests for _data_finder.py.""" -import contextlib -import os -import shutil -import sys -import tempfile - -import pytest -import yaml - -from esmvaltool.utils.recipe_filler import run - - -# Load test configuration -with open(os.path.join(os.path.dirname(__file__), - 'recipe_filler.yml')) as file: - CONFIG = yaml.safe_load(file) - - -@contextlib.contextmanager -def arguments(*args): - backup = sys.argv - sys.argv = list(args) - yield - sys.argv = backup - - -def print_path(path): - """Print path.""" - txt = path - if os.path.isdir(path): - txt += '/' - if os.path.islink(path): - txt += ' -> ' + os.readlink(path) - print(txt) - - -def tree(path): - """Print path, similar to the the `tree` command.""" - print_path(path) - for dirpath, dirnames, filenames in os.walk(path): - for dirname in dirnames: - print_path(os.path.join(dirpath, dirname)) - for filename in filenames: - print_path(os.path.join(dirpath, filename)) - - -def create_file(filename): - """Create an empty file.""" - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - - with open(filename, 'a'): - pass - - -def create_tree(path, filenames=None, symlinks=None): - """Create directory structure and files.""" - for filename in filenames or []: - 
create_file(os.path.join(path, filename)) - - for symlink in symlinks or []: - link_name = os.path.join(path, symlink['link_name']) - os.symlink(symlink['target'], link_name) - - -def write_config_user_file(dirname, file_path, drs): - config_file = dirname / 'config-user.yml' - cfg = { - 'log_level': 'info', - 'output_dir': str(dirname / 'recipe_filler_output'), - 'rootpath': { - 'CMIP5': str(dirname / file_path), - 'CMIP6': str(dirname / file_path), - }, - 'drs': { - 'CMIP5': drs, - 'CMIP6': drs, - }, - } - config_file.write_text(yaml.safe_dump(cfg, encoding=None)) - return str(config_file) - - -def write_recipe(dirname, recipe_dict): - recipe_file = dirname / 'recipe.yml' - diags = {'diagnostics': recipe_dict} - recipe_file.write_text(yaml.safe_dump(diags, encoding=None)) - return str(recipe_file) - - -@pytest.fixture -def root(): - """Root function for tests.""" - dirname = tempfile.mkdtemp() - yield os.path.join(dirname, 'output1') - print("Directory structure was:") - tree(dirname) - shutil.rmtree(dirname) - - -def setup_files(tmp_path, root, cfg): - """Create config, recipe ,output recipe etc.""" - user_config_file = write_config_user_file(tmp_path, root, cfg['drs']) - diagnostics = {} - diagnostics["test_diagnostic"] = {} - diagnostics["test_diagnostic"]["variables"] = {} - diagnostics["test_diagnostic"]["variables"]["test_var"] = cfg["variable"] - recipe = write_recipe(tmp_path, diagnostics) - output_recipe = str(tmp_path / "recipe_auto.yml") - - return user_config_file, recipe, output_recipe - - -@pytest.mark.parametrize('cfg', CONFIG['has_additional_datasets']) -def test_adding_datasets(tmp_path, root, cfg): - """Test retrieving additional datasets.""" - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) - - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') 
as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" in var - - -@pytest.mark.parametrize('cfg', CONFIG['no_additional_datasets']) -def test_not_adding_datasets(tmp_path, root, cfg): - """Test retrieving no additional datasets.""" - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) - - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" not in var - - -def test_bad_var(tmp_path, root): - """Test a bad variable in the works.""" - cfg = CONFIG['bad_variable'][0] - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - # this doesn't fail and it shouldn't since it can go on - # and look for data for other valid variables - with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" not in var - - -def test_no_short_name(tmp_path, root): - """Test a bad variable in the works.""" - cfg = CONFIG['no_short_name'][0] - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - # this doesn't fail and it shouldn't since it can go on - # and look for data for other valid variables - with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = 
yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" not in var From c4f757638ea3e78c635cc130ed965d47c32c1d9e Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Wed, 13 Nov 2024 12:23:41 +0000 Subject: [PATCH 16/36] Fix issue related to removal/change of private function imported in `diag_scripts/shared/_supermeans.py` (deprecation in iris=3.11) (#3810) --- esmvaltool/diag_scripts/shared/_supermeans.py | 23 ++++++++++++++++++- 1 file changed, 22 insertions(+), 1 deletion(-) diff --git a/esmvaltool/diag_scripts/shared/_supermeans.py b/esmvaltool/diag_scripts/shared/_supermeans.py index 7099ba4725..8543ca99cf 100644 --- a/esmvaltool/diag_scripts/shared/_supermeans.py +++ b/esmvaltool/diag_scripts/shared/_supermeans.py @@ -13,7 +13,6 @@ import cf_units import iris import iris.coord_categorisation -from iris.coord_categorisation import _pt_date import numpy as np @@ -206,6 +205,28 @@ def add_start_hour(cube, coord, name='diurnal_sampling_hour'): _add_categorised_coord(cube, name, coord, start_hour_from_bounds) +# lifted from iris==3.10 last iris to have it in iris.coord_categorisation +# Private "helper" function +def _pt_date(coord, time): + """Return the datetime of a time-coordinate point. + + Parameters + ---------- + coord : Coord + Coordinate (must be Time-type). + time : float + Value of a coordinate point. + + Returns + ------- + cftime.datetime + + """ + # NOTE: All of the currently defined categorisation functions are + # calendar operations on Time coordinates. 
+ return coord.units.num2date(time, only_use_cftime_datetimes=True) + + def start_hour_from_bounds(coord, _, bounds): """Add hour from bounds.""" return np.array([_pt_date(coord, _bounds[0]).hour for _bounds in bounds]) From de43833ff1238d1c0b5e70bf4b12d67583d8057e Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Thu, 14 Nov 2024 16:35:32 +0000 Subject: [PATCH 17/36] Update environment: pin `iris>=3.11`, unpin `cartopy` and allow for `numpy >=2` (#3811) Co-authored-by: Manuel Schlund <32543114+schlunma@users.noreply.github.com> --- environment.yml | 10 +++++----- environment_osx.yml | 8 ++++---- setup.py | 4 ++-- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/environment.yml b/environment.yml index 270f0f6ecd..72ccf127f6 100644 --- a/environment.yml +++ b/environment.yml @@ -10,27 +10,27 @@ channels: dependencies: - aiohttp - - cartopy <0.24 # https://github.com/ESMValGroup/ESMValTool/issues/3767 + - cartopy - cdo >=2.3.0 - cdsapi - cf-units - cfgrib - cftime - cmocean - - curl <8.10 + - curl <8.10 # https://github.com/ESMValGroup/ESMValTool/issues/3758 - cython - dask !=2024.8.0 # https://github.com/dask/dask/issues/11296 - distributed - ecmwf-api-client - eofs - - esmpy # <8.6 safe https://github.com/SciTools/iris-esmf-regrid/issues/415 + - esmpy - esmvalcore 2.11.* - fiona - fire - fsspec - gdal >=3.9.0 - importlib_metadata <8 # https://github.com/ESMValGroup/ESMValTool/issues/3699 only for Python 3.10/11 and esmpy<8.6 - - iris >=3.6.1 + - iris >=3.11 - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 - jinja2 - joblib @@ -41,7 +41,7 @@ dependencies: - nc-time-axis - netCDF4 - numba - - numpy !=1.24.3,<2.0 # severe masking bug + - numpy !=1.24.3 # severe masking bug - openpyxl - packaging - pandas==2.1.4 # unpin when ESMValCore released with https://github.com/ESMValGroup/ESMValCore/pull/2529 diff --git a/environment_osx.yml b/environment_osx.yml index 8285b43ecd..242f0a4f56 100644 --- a/environment_osx.yml 
+++ b/environment_osx.yml @@ -10,7 +10,7 @@ channels: dependencies: - aiohttp - - cartopy <0.24 # https://github.com/ESMValGroup/ESMValTool/issues/3767 + - cartopy - cdo >=2.3.0 - cdsapi - cf-units @@ -22,14 +22,14 @@ dependencies: - distributed - ecmwf-api-client - eofs - - esmpy # <8.6 safe https://github.com/SciTools/iris-esmf-regrid/issues/415 + - esmpy - esmvalcore 2.11.* - fiona - fire - fsspec - gdal >=3.9.0 - importlib_metadata <8 # https://github.com/ESMValGroup/ESMValTool/issues/3699 only for Python 3.10/11 and esmpy<8.6 - - iris >=3.6.1 + - iris >=3.11 - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 - jinja2 - joblib @@ -40,7 +40,7 @@ dependencies: - nc-time-axis - netCDF4 - numba - - numpy !=1.24.3,<2.0 # severe masking bug + - numpy !=1.24.3 # severe masking bug - openpyxl - packaging - pandas==2.1.4 # unpin when ESMValCore released with https://github.com/ESMValGroup/ESMValCore/pull/2529 diff --git a/setup.py b/setup.py index 86aab79854..cdadaca2d2 100755 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ # Use with pip install . 
to install from source 'install': [ 'aiohttp', - 'cartopy<0.24', # github.com/ESMValGroup/ESMValTool/issues/3767 + 'cartopy', 'cdo', 'cdsapi', 'cf-units', @@ -67,7 +67,7 @@ 'scikit-image', 'scikit-learn>=1.4.0', # github.com/ESMValGroup/ESMValTool/issues/3504 'scipy', - 'scitools-iris>=3.6.1', + 'scitools-iris>=3.11', 'seaborn', 'seawater', 'shapely>=2', From e070fd5a86bc3832c82e801832cc5cfbdabf7ffb Mon Sep 17 00:00:00 2001 From: Axel Lauer Date: Thu, 21 Nov 2024 12:09:12 +0100 Subject: [PATCH 18/36] Add info on obs tiers to docu (#3624) Co-authored-by: Bouwe Andela Co-authored-by: Romain Beucher --- doc/sphinx/source/input.rst | 34 ++++++++++++++++++++++++++-------- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index fbc16b45ec..f9bcfafc3e 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -112,6 +112,21 @@ ESMValTool currently supports two ways to perform this reformatting (aka checks and fixes'). Details on this second method are given at the :ref:`end of this chapter `. +Tiers +----- + +All observational datasets are grouped into in three tiers: + +* **Tier 1**: obs4mips and ana4mips datasets. These datasets are publicly and freely available without any license restrictions. These datasets do not need any reformatting and can be used as is with ESMValTool. +* **Tier 2** other freely available datasets that are not obs4mips. There are no license restrictions. These datasets need to be reformatted to be used with ESMValTool ('CMORization', see above). +* **Tier 3** restricted datasets. Datasets which require registration to be downloaded or that can only be obtained upon request from the respective authors. License restrictions do not allow us to redistribute Tier 3 datasets. The data have to be obtained and reformatted by the user ('CMORization', see above). + +[!NOTE] +.. 
_tier3_note: +For some of the Tier 3 datasets, we obtained permission from the dataset providers to share the data among ESMValTool users on HPC systems. These Tier 3 datasets are marked with an asterisk in the table in section :ref:`supported datasets below`. + +An overview of the Tier 2 and Tier 3 datasets for which a CMORizing script is available in ESMValTool v2.0 is given in section :ref:`supported datasets below`. + A collection of readily CMORized OBS and OBS6 datasets can be accessed directly on CEDA/JASMIN and DKRZ. At CEDA/JASMIN OBS and OBS6 data is stored in the `esmeval` Group Workspace (GWS), and to be granted read (and execute) permissions to the GWS, one must apply at https://accounts.jasmin.ac.uk/services/group_workspaces/esmeval/ ; after permission has been granted, the user @@ -246,7 +261,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CALIPSO-GOCCP | clcalipso (cfMon) | 2 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| CALIPSO-ICECLOUD | cli (AMon) | 3 | NCL | +| CALIPSO-ICECLOUD* [#t3]_ | cli (AMon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CDS-SATELLITE-ALBEDO | bdalb (Lmon), bhalb (Lmon) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -330,7 +345,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol 
+------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | ESRL | co2s (Amon) | 2 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| FLUXCOM | gpp (Lmon) | 3 | Python | +| FLUXCOM* [#t3]_ | gpp (Lmon) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | GCP2018 | fgco2 (Omon [#note3]_), nbp (Lmon [#note3]_) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -380,17 +395,17 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | Landschuetzer2020 | spco2 (Omon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| MAC-LWP | lwp, lwpStderr (Amon) | 3 | NCL | +| MAC-LWP* [#t3]_ | lwp, lwpStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MERRA | cli, clivi, clt, clw, clwvi, hur, hus, lwp, pr, prw, ps, psl, rlut, rlutcs, rsdt, rsut, rsutcs, ta, | 3 | NCL | | | tas, ts, ua, va, wap, zg (Amon) | | | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| MERRA2 | sm (Lmon) | 3 | Python | +| MERRA2* [#t3]_ | sm (Lmon) | 3 | Python | | | clt, pr, 
evspsbl, hfss, hfls, huss, prc, prsn, prw, ps, psl, rlds, rldscs, rlus, rlut, rlutcs, rsds, | | | | | rsdscs, rsdt, tas, tasmin, tasmax, tauu, tauv, ts, uas, vas, rsus, rsuscs, rsut, rsutcs, ta, ua, va, | | | | | tro3, zg, hus, wap, hur, cl, clw, cli, clwvi, clivi (Amon) | | | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| MLS-AURA | hur, hurStderr (day) | 3 | Python | +| MLS-AURA* [#t3]_ | hur, hurStderr (day) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MOBO-DIC_MPIM | dissic (Omon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -400,7 +415,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MSWEP [#note1]_ | pr | 3 | n/a | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| MTE | gpp, gppStderr (Lmon) | 3 | Python | +| MTE* [#t3]_ | gpp, gppStderr (Lmon) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NCEP-NCAR-R1 | clt, hur, hurs, hus, pr, prw, psl, rlut, rlutcs, rsut, rsutcs, sfcWind, ta, tas, | 2 | Python | | | tasmax, tasmin, ts, ua, va, wap, zg (Amon) | | | @@ -410,7 +425,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol 
+------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NDP | cVeg (Lmon) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NIWA-BS | toz, tozStderr (Amon) | 3 | NCL | +| NIWA-BS* [#t3]_ | toz, tozStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut, pr, tauu, tauv (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -448,7 +463,7 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | TCOM-N2O | n2o (Amon [#note3]_) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| UWisc | clwvi, lwpStderr (Amon) | 3 | NCL | +| UWisc* [#t3]_ | clwvi, lwpStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | WFDE5 | tas, pr (Amon, day) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -456,6 +471,9 @@ A list of the datasets for which a CMORizers is available is provided in the fol | | no3, o2, po4, si (Oyr) | | | 
+------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +.. [#t3] We obtained permission from the dataset provider to share this dataset + among ESMValTool users on HPC systems. + .. [#note1] CMORization is built into ESMValTool through the native6 project, so there is no separate CMORizer script. From dc23cdf484d4194aad6fbc8d673452b1021e2c5f Mon Sep 17 00:00:00 2001 From: Emma Hogan Date: Thu, 21 Nov 2024 16:15:47 +0000 Subject: [PATCH 19/36] Recipe Test Workflow (RTW) prototype (#3210) Co-authored-by: mo-tgeddes <108924122+mo-tgeddes@users.noreply.github.com> Co-authored-by: Katherine Tomkins Co-authored-by: Jon Lillis Co-authored-by: Jon Lillis <68286976+Jon-Lillis@users.noreply.github.com> Co-authored-by: Andrew Clark Co-authored-by: Alistair Sellar Co-authored-by: Alistair Sellar Co-authored-by: Alistair Sellar <16133375+alistairsellar@users.noreply.github.com> Co-authored-by: Ed <146008263+mo-gill@users.noreply.github.com> Co-authored-by: chrisbillowsMO <152496175+chrisbillowsMO@users.noreply.github.com> Co-authored-by: Valeriu Predoi Co-authored-by: sloosvel <45196700+sloosvel@users.noreply.github.com> --- .codacy.yml | 3 +- .github/CODEOWNERS | 1 + .github/workflows/check-rtw.yml | 83 ++++++++ .zenodo.json | 32 ++- CITATION.cff | 34 +++- doc/sphinx/source/gensidebar.py | 2 +- doc/sphinx/source/utils/RTW/about.rst | 14 ++ doc/sphinx/source/utils/RTW/add_a_recipe.rst | 118 +++++++++++ doc/sphinx/source/utils/RTW/common.txt | 33 ++++ doc/sphinx/source/utils/RTW/glossary.rst | 39 ++++ doc/sphinx/source/utils/RTW/index.rst | 11 ++ .../source/utils/RTW/tested_recipes.rst | 19 ++ .../source/utils/RTW/user_guide/index.rst | 9 + .../utils/RTW/user_guide/quick_start.rst | 42 ++++ .../source/utils/RTW/user_guide/workflow.rst | 105 ++++++++++ doc/sphinx/source/{ => utils}/utils.rst | 14 ++ .../app/compare/rose-app.conf | 4 + .../app/configure/bin/__init__.py | 0 
.../app/configure/bin/configure.py | 145 ++++++++++++++ .../app/configure/bin/test_configure.py | 76 +++++++ .../app/configure/rose-app.conf | 2 + .../app/get_esmval/bin/clone_latest_esmval.sh | 19 ++ .../app/get_esmval/opt/rose-app-jasmin.conf | 10 + .../get_esmval/opt/rose-app-metoffice.conf | 7 + .../app/get_esmval/rose-app.conf | 0 .../app/install_env_file/rose-app.conf | 11 ++ .../app/process/rose-app.conf | 5 + .../utils/recipe_test_workflow/flow.cylc | 120 ++++++++++++ .../recipe_test_workflow/meta/rose-meta.conf | 185 ++++++++++++++++++ .../opt/rose-suite-jasmin.conf | 10 + .../opt/rose-suite-metoffice.conf | 10 + .../recipe_test_workflow/rose-suite.conf | 24 +++ .../recipe_test_workflow/rose-suite.info | 6 + .../recipe_test_workflow/site/jasmin-env | 59 ++++++ .../recipe_test_workflow/site/jasmin.cylc | 44 +++++ .../recipe_test_workflow/site/metoffice-env | 55 ++++++ .../recipe_test_workflow/site/metoffice.cylc | 60 ++++++ setup.cfg | 3 +- 38 files changed, 1398 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/check-rtw.yml create mode 100644 doc/sphinx/source/utils/RTW/about.rst create mode 100644 doc/sphinx/source/utils/RTW/add_a_recipe.rst create mode 100644 doc/sphinx/source/utils/RTW/common.txt create mode 100644 doc/sphinx/source/utils/RTW/glossary.rst create mode 100644 doc/sphinx/source/utils/RTW/index.rst create mode 100644 doc/sphinx/source/utils/RTW/tested_recipes.rst create mode 100644 doc/sphinx/source/utils/RTW/user_guide/index.rst create mode 100644 doc/sphinx/source/utils/RTW/user_guide/quick_start.rst create mode 100644 doc/sphinx/source/utils/RTW/user_guide/workflow.rst rename doc/sphinx/source/{ => utils}/utils.rst (98%) create mode 100644 esmvaltool/utils/recipe_test_workflow/app/compare/rose-app.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/app/configure/bin/__init__.py create mode 100755 esmvaltool/utils/recipe_test_workflow/app/configure/bin/configure.py create mode 100644 
esmvaltool/utils/recipe_test_workflow/app/configure/bin/test_configure.py create mode 100644 esmvaltool/utils/recipe_test_workflow/app/configure/rose-app.conf create mode 100755 esmvaltool/utils/recipe_test_workflow/app/get_esmval/bin/clone_latest_esmval.sh create mode 100644 esmvaltool/utils/recipe_test_workflow/app/get_esmval/opt/rose-app-jasmin.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/app/get_esmval/opt/rose-app-metoffice.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/app/get_esmval/rose-app.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/app/install_env_file/rose-app.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/app/process/rose-app.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/flow.cylc create mode 100644 esmvaltool/utils/recipe_test_workflow/meta/rose-meta.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/opt/rose-suite-jasmin.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/opt/rose-suite-metoffice.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/rose-suite.conf create mode 100644 esmvaltool/utils/recipe_test_workflow/rose-suite.info create mode 100755 esmvaltool/utils/recipe_test_workflow/site/jasmin-env create mode 100644 esmvaltool/utils/recipe_test_workflow/site/jasmin.cylc create mode 100755 esmvaltool/utils/recipe_test_workflow/site/metoffice-env create mode 100644 esmvaltool/utils/recipe_test_workflow/site/metoffice.cylc diff --git a/.codacy.yml b/.codacy.yml index 06a0ea342f..afe979f5c7 100644 --- a/.codacy.yml +++ b/.codacy.yml @@ -21,5 +21,6 @@ engines: exclude_paths: [ 'doc/sphinx/**', 'esmvaltool/cmor/tables/**', - 'tests/**' + 'tests/**', + 'esmvaltool/utils/recipe_test_workflow/app/configure/bin/test_configure.py' ] diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 2086d60173..3478d469b4 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,3 @@ esmvaltool/cmorizers @ESMValGroup/obs-maintainers 
.github/workflows @valeriupredoi +esmvaltool/utils/recipe_test_workflow/ @alistairsellar @ehogan diff --git a/.github/workflows/check-rtw.yml b/.github/workflows/check-rtw.yml new file mode 100644 index 0000000000..611601dfd7 --- /dev/null +++ b/.github/workflows/check-rtw.yml @@ -0,0 +1,83 @@ +# This workflow performs various validation steps for Cylc and Rose. +name: Check Recipe Test Workflow (RTW) + +# Controls when the action will run +on: + # Triggers the workflow on push events + push: + paths: +# - esmvaltool/utils/recipe_test_workflow/** + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Common variables are defined here +env: + RTW_ROOT_DIR: esmvaltool/utils/recipe_test_workflow + +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + +# A workflow run is made up of one or more jobs that can run +# sequentially or in parallel +jobs: + # This workflow contains a single job called "check-rtw" + check-rtw: + # The type of runner that the job will run on + runs-on: ubuntu-latest + + # Steps represent a sequence of tasks that will be executed as part + # of the job + steps: + # Checks-out your repository under $GITHUB_WORKSPACE, so your job + # can access it + - uses: actions/checkout@v4 + - uses: conda-incubator/setup-miniconda@v3 + with: + miniforge-version: "latest" + miniforge-variant: Miniforge3 + use-mamba: true + conda-remove-defaults: "true" + + - name: Install Cylc and Rose + run: conda install cylc-flow>=8.2 cylc-rose metomi-rose + + - name: Check current environment + run: conda list + + - name: Validate Cylc workflow + run: | + cd ${RTW_ROOT_DIR} + cylc validate . 
-O metoffice + + - name: Run Cylc configuration linter + run: | + cd ${RTW_ROOT_DIR} + cylc lint + + - name: Validate format of Rose configuration files + run: | + cd ${RTW_ROOT_DIR} + output="$(rose config-dump)" + msg="Run 'rose config-dump' to re-dump the Rose configuration files" + msg="${msg} in the common format, then commit the changes." + # The '-z' option returns true if 'output' is empty. + if [[ -z "${output}" ]]; then true; else echo "${msg}" && exit 1; fi + + - name: Validate Rose configuration metadata + run: | + cd ${RTW_ROOT_DIR} + rose metadata-check -C meta/ + + - name: Run Rose configuration validation macros + run: | + cd ${RTW_ROOT_DIR} + rose macro -V + + - name: Lint shell scripts + run: | + cd ${RTW_ROOT_DIR} + output=$(find . -name "*.sh" -exec shellcheck {} \;) + if [ "$output" ]; then echo "${output}" && exit 1; fi diff --git a/.zenodo.json b/.zenodo.json index c087c4ae21..be799a9dc1 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -81,13 +81,17 @@ "name": "Berg, Peter", "orcid": "0000-0002-1469-2568" }, + { + "affiliation": "Met Office, UK", + "name": "Billows, Chris" + }, { "affiliation": "DLR, Germany", "name": "Bock, Lisa", "orcid": "0000-0001-7058-5938" }, { - "affiliation": "MetOffice, UK", + "affiliation": "Met Office, UK", "name": "Bodas-Salcedo, Alejandro", "orcid": "0000-0002-7890-2536" }, @@ -142,7 +146,7 @@ "name": "Docquier, David" }, { - "affiliation": "MetOffice, UK", + "affiliation": "Met Office, UK", "name": "Dreyer, Laura" }, { @@ -150,13 +154,21 @@ "name": "Ehbrecht, Carsten" }, { - "affiliation": "MetOffice, UK", + "affiliation": "Met Office, UK", "name": "Earnshaw, Paul" }, + { + "affiliation": "Met Office, UK", + "name": "Geddes, Theo" + }, { "affiliation": "University of Bremen, Germany", "name": "Gier, Bettina" }, + { + "affiliation": "Met Office, UK", + "name": "Gillett, Ed" + }, { "affiliation": "BSC, Spain", "name": "Gonzalez-Reviriego, Nube", @@ -191,6 +203,10 @@ "name": "Heuer, Helge", "orcid": 
"0000-0003-2411-7150" }, + { + "affiliation": "Met Office, UK", + "name": "Hogan, Emma" + }, { "affiliation": "BSC, Spain", "name": "Hunter, Alasdair", @@ -227,7 +243,7 @@ "orcid": "0000-0001-6085-5914" }, { - "affiliation": "MetOffice, UK", + "affiliation": "Met Office, UK", "name": "Little, Bill" }, { @@ -279,7 +295,7 @@ "name": "Sandstad, Marit" }, { - "affiliation": "MetOffice, UK", + "affiliation": "Met Office, UK", "name": "Sellar, Alistair" }, { @@ -305,6 +321,10 @@ "name": "Swaminathan, Ranjini", "orcid": "0000-0001-5853-2673" }, + { + "affiliation": "Met Office, UK", + "name": "Tomkins, Katherine" + }, { "affiliation": "BSC, Spain", "name": "Torralba, Verónica" @@ -387,7 +407,7 @@ "orcid": "0000-0003-3780-0784" }, { - "affiliation": "MetOffice, UK", + "affiliation": "Met Office, UK", "name": "Munday, Gregory", "orcid": "0000-0003-4750-9923" } diff --git a/CITATION.cff b/CITATION.cff index 1934c36ef1..ab158d2436 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -85,13 +85,17 @@ authors: family-names: Berg given-names: Peter orcid: "https://orcid.org/0000-0002-1469-2568" + - + affiliation: "Met Office, UK" + family-names: Billows + given-names: Chris - affiliation: "DLR, Germany" family-names: Bock given-names: Lisa orcid: "https://orcid.org/0000-0001-7058-5938" - - affiliation: "MetOffice, UK" + affiliation: "Met Office, UK" family-names: Bodas-Salcedo given-names: Alejandro orcid: "https://orcid.org/0000-0002-7890-2536" @@ -146,7 +150,7 @@ authors: family-names: Docquier given-names: David - - affiliation: "MetOffice, UK" + affiliation: "Met Office, UK" family-names: Dreyer given-names: Laura - @@ -154,13 +158,21 @@ authors: family-names: Ehbrecht given-names: Carsten - - affiliation: "MetOffice, UK" + affiliation: "Met Office, UK" family-names: Earnshaw given-names: Paul + - + affiliation: "Met Office, UK" + family-names: Geddes + given-names: Theo - affiliation: "University of Bremen, Germany" family-names: Gier given-names: Bettina + - + affiliation: "Met 
Office, UK" + family-names: Gillett + given-names: Ed - affiliation: "BSC, Spain" family-names: Gonzalez-Reviriego @@ -196,6 +208,10 @@ authors: family-names: Heuer given-names: Helge orcid: "https://orcid.org/0000-0003-2411-7150" + - + affiliation: "Met Office, UK" + family-names: Hogan + given-names: Emma - affiliation: "BSC, Spain" family-names: Hunter @@ -232,7 +248,7 @@ authors: given-names: Valerio orcid: "https://orcid.org/0000-0001-6085-5914" - - affiliation: "MetOffice, UK" + affiliation: "Met Office, UK" family-names: Little given-names: Bill - @@ -289,7 +305,7 @@ authors: family-names: Sandstad given-names: Marit - - affiliation: "MetOffice, UK" + affiliation: "Met Office, UK" family-names: Sellar given-names: Alistair - @@ -315,6 +331,10 @@ authors: family-names: Swaminathan given-names: Ranjini orcid: "https://orcid.org/0000-0001-5853-2673" + - + affiliation: "Met Office, UK" + family-names: Tomkins + given-names: Katherine - affiliation: "BSC, Spain" family-names: Torralba @@ -396,8 +416,8 @@ authors: family-names: Bonnet given-names: Pauline orcid: "https://orcid.org/0000-0003-3780-0784" - - - affiliation: "MetOffice, UK" + - + affiliation: "Met Office, UK" family-names: Munday given-names: Gregory orcid: "https://orcid.org/0000-0003-4750-9923" diff --git a/doc/sphinx/source/gensidebar.py b/doc/sphinx/source/gensidebar.py index 970722ff0a..f8b766ab7d 100644 --- a/doc/sphinx/source/gensidebar.py +++ b/doc/sphinx/source/gensidebar.py @@ -65,7 +65,7 @@ def _header(project, text): _write("esmvaltool", "Obtaining input data", "input") _write("esmvaltool", "Making a recipe or diagnostic", "develop/index") _write("esmvaltool", "Contributing to the community", "community/index") - _write("esmvaltool", "Utilities", "utils") + _write("esmvaltool", "Utilities", "utils/utils") _write("esmvaltool", "Diagnostics API Reference", "api/esmvaltool") _write("esmvaltool", "Frequently Asked Questions", "faq") _write("esmvaltool", "Changelog", "changelog") diff --git 
a/doc/sphinx/source/utils/RTW/about.rst b/doc/sphinx/source/utils/RTW/about.rst new file mode 100644 index 0000000000..62883fe2e1 --- /dev/null +++ b/doc/sphinx/source/utils/RTW/about.rst @@ -0,0 +1,14 @@ +***** +About +***** + +.. include:: common.txt + +The Recipe Test Workflow (|RTW|) is a workflow that is used to regularly run +recipes so issues can be discovered during the development process sooner +rather than later. + +|Cylc| v8 and |Rose| v2 are used as the workflow engine and application +configuration system for the |RTW|, respectively. |Cylc| and |Rose| are not +included in the ESMValTool environment as they are typically already centrally +installed at sites e.g. JASMIN and the Met Office. diff --git a/doc/sphinx/source/utils/RTW/add_a_recipe.rst b/doc/sphinx/source/utils/RTW/add_a_recipe.rst new file mode 100644 index 0000000000..6e495e1f1c --- /dev/null +++ b/doc/sphinx/source/utils/RTW/add_a_recipe.rst @@ -0,0 +1,118 @@ +How to add a recipe to the |RTW| +================================ + +.. include:: common.txt + +.. note:: + Before you follow these steps to add your recipe, you must be able to + successfully run the recipe with the latest version of ESMValTool on the + compute server you use at your site, as detailed by the ``platform`` option + in the ``[[COMPUTE]]`` section in the site-specific ``.cylc`` file in the + ``esmvaltool/utils/recipe_test_workflow/site/`` directory. + +#. Open a `new ESMValTool issue`_ on GitHub, assign yourself to the issue, and + add the ``Recipe Test Workflow (RTW)`` label to the issue, see + `ESMValTool issue #3663`_ for an example. + +#. Create a branch. + +#. 
Obtain the duration and memory usage of the recipe from the messages printed + to screen, or at the end of the ``run/main_log.txt`` file in the recipe + output directory after running your recipe on the compute cluster you use at + your site; these messages will look something like:: + + YYYY-MM-DD HH:MM:SS:sss UTC [12345] INFO Time for running the recipe was: 0:02:13.334742 + YYYY-MM-DD HH:MM:SS:sss UTC [12345] INFO Maximum memory used (estimate): 2.4 GB + [...] + YYYY-MM-DD HH:MM:SS:sss UTC [12345] INFO Run was successful + +#. Add the recipe to the ``[task parameters]`` section in the + ``esmvaltool/utils/recipe_test_workflow/flow.cylc`` file. + + .. hint:: + If the recipe takes less than 10 minutes to run then it should be added + to the ``fast`` option. Recipes that take longer than ten minutes should + be added to the ``medium`` option. + + .. hint:: + The line added should follow the format of ``recipe_new_recipe, \``, + unless the line is the last one in the list, in which case the line added + should follow the format of ``recipe_new_recipe``. + +#. If the duration of the recipe is larger than the value specified by the + ``execution time limit`` option in the ``[[COMPUTE]]`` section in the + aforementioned site-specific ``.cylc`` file, and / or the memory usage of + the recipe is larger than the value specified by the ``--mem`` option in the + ``[[[directives]]]`` section in the ``[[COMPUTE]]`` section, add a section + (in alphabetical order) to this file as shown below (round the duration to + the nearest second):: + + [[process]] + # Actual: 0m31s, 2.5 GB on 2024-04-08. + execution time limit = PT2M + [[[directives]]] + --mem = 3G + + .. hint:: + The ``fast`` key in the example task definition above + (``[[process]]``) should match name of the + option the recipe was added to in the ``[task parameters]`` section in + the ``esmvaltool/utils/recipe_test_workflow/flow.cylc`` file + + .. 
hint:: + Set the ``execution time limit`` to 10-20% more than the actual duration. + For actual durations of up to ``1m45s``, set the ``execution time limit`` + to ``PT2M`` (2 minutes). + + .. hint:: + Try not to regularly waste more than 500 MiB in memory usage. Typically, + rounding the actual memory usage up to the nearest integer is acceptable. + +#. Stop any running ``recipe_test_workflow`` workflows:: + + cylc stop recipe_test_workflow/* + +#. Run the |RTW|, as detailed in the :ref:`quick_start_guide`; it is expected + that the ``compare`` task will fail. + +#. Update the Known Good Outputs (|KGOs|): + + * Recursively copy the recipe output directory (i.e. + ``recipe___