diff --git a/.circleci/config.yml b/.circleci/config.yml index ad43c7f495..5be43d58c6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -27,6 +27,7 @@ commands: name: Log versions command: | . /opt/conda/etc/profile.d/conda.sh + conda env export --name base > /logs/base_environment.yml conda activate esmvaltool esmvaltool version dpkg -l > /logs/versions.txt @@ -40,6 +41,7 @@ commands: command: | . /opt/conda/etc/profile.d/conda.sh conda activate esmvaltool + mamba --version pytest -n 4 --junitxml=test-reports/report.xml esmvaltool version esmvaltool -- --help @@ -214,7 +216,7 @@ jobs: conda activate esmvaltool mkdir -p ~/climate_data esmvaltool config get_config_user - echo "offline: false" >> ~/.esmvaltool/config-user.yml + echo "search_esgf: when_missing" >> ~/.esmvaltool/config-user.yml cat ~/.esmvaltool/config-user.yml for recipe in esmvaltool/recipes/testing/recipe_*.yml; do esmvaltool run "$recipe" @@ -232,7 +234,7 @@ jobs: # Test building documentation docker: - image: condaforge/mambaforge:latest - resource_class: small + resource_class: medium steps: - checkout - run: @@ -265,7 +267,7 @@ jobs: # Install prerequisites mkdir /logs # Install ESMValTool in a new conda environment - mamba create -y --name esmvaltool -c conda-forge esmvaltool julia 'python=3.10' >> /logs/conda.txt 2>&1 + mamba create -y --name esmvaltool -c conda-forge esmvaltool julia 'python=3.11' >> /logs/conda.txt 2>&1 # Activate the environment set +x; conda activate esmvaltool; set -x # install the Julia dependencies diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e1aaa42272..2086d60173 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,2 +1,2 @@ -esmvaltool/cmorizers @remi-kazeroni +esmvaltool/cmorizers @ESMValGroup/obs-maintainers .github/workflows @valeriupredoi diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 83cf97eb4c..5041b22420 100644 --- a/.github/pull_request_template.md +++ 
b/.github/pull_request_template.md @@ -62,6 +62,7 @@ It is the responsibility of the author to make sure the pull request is ready to - [ ] [🛠][1] [Documentation](https://docs.esmvaltool.org/en/latest/community/dataset.html#dataset-documentation) is available - [ ] [🛠][1] The dataset has been [added to the CMOR check recipe](https://docs.esmvaltool.org/en/latest/community/dataset.html#testing) +- [ ] [🛠][1] The dataset has been added to the shared [data pools](https://docs.esmvaltool.org/en/latest/community/dataset.html#cmorized-data) of DKRZ and Jasmin by the @ESMValGroup/OBS-maintainers team - [ ] [🧪][2] Numbers and units of the data look [physically meaningful](https://docs.esmvaltool.org/en/latest/community/dataset.html#scientific-sanity-check) *** diff --git a/.github/workflows/create-condalock-file.yml b/.github/workflows/create-condalock-file.yml index 34ec0aada7..9ab4443cd7 100644 --- a/.github/workflows/create-condalock-file.yml +++ b/.github/workflows/create-condalock-file.yml @@ -6,10 +6,15 @@ on: # and hence lots of automated PRs # push: # branches: - # - condalock-update + # - main schedule: - cron: '0 4 */10 * *' +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: create-lock-file: name: Create conda lock file for latest Python @@ -27,50 +32,49 @@ jobs: miniforge-variant: Mambaforge use-mamba: true - name: Show conda config - shell: bash -l {0} run: | conda update -n base -c conda-forge conda - conda info - conda list + conda --version + # setup-miniconda@v2 installs an old conda and mamba + # forcing a modern mamba updates both mamba and conda + conda install -c conda-forge "mamba>=1.4.8" conda config --show-sources conda config --show + conda --version + mamba --version - name: Python info - shell: bash -l {0} run: | which python python --version - name: Install conda-lock - shell: bash -l {0} - run: mamba install -y conda-lock + run: mamba install -y -c conda-forge conda-lock - name: 
Check version of conda-lock - shell: bash -l {0} run: conda-lock --version - name: Create conda lock file for linux-64 - shell: bash -l {0} run: conda-lock lock --platform linux-64 -f environment.yml --mamba --kind explicit - - shell: bash -l {0} + - name: Show conda version again run: conda --version - - shell: bash -l {0} - run: which python - - shell: bash -l {0} - run: python -V - - shell: bash -l {0} + - name: Show Python exec and version again + run: | + which python + python -V + - name: Create conda-lock environment run: conda create --name esmvaltool-fromlock --file conda-linux-64.lock - - shell: bash -l {0} + - name: Install pip run: mamba install -y pip - - shell: bash -l {0} - run: which python - - shell: bash -l {0} - run: pip --version - - shell: bash -l {0} + - name: Check Python and pip versions post pip-install + run: | + which python + pip --version + - name: Install ESMValTool run: pip install -e .[develop] - - shell: bash -l {0} + - name: Check ESMValTool help run: esmvaltool --help - - shell: bash -l {0} + - name: Check ESMValTool version run: esmvaltool version - - shell: bash -l {0} + - name: Run flake8 run: flake8 - - shell: bash -l {0} + - name: Run tests run: pytest -n 2 -m "not installation" # Automated PR # see https://github.com/marketplace/actions/create-pull-request diff --git a/.github/workflows/install-from-conda.yml b/.github/workflows/install-from-conda.yml index 3696d34e86..55897e7fe4 100644 --- a/.github/workflows/install-from-conda.yml +++ b/.github/workflows/install-from-conda.yml @@ -9,6 +9,11 @@ on: schedule: - cron: '0 4 * * *' +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: linux: runs-on: "ubuntu-latest" @@ -24,20 +29,16 @@ jobs: miniforge-version: "latest" miniforge-variant: Mambaforge use-mamba: true - - shell: bash -l {0} - run: mkdir -p conda_install_linux_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p 
conda_install_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt which conda 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/conda_path.txt which mamba 2>&1 | tee -a conda_install_linux_artifacts_python_${{ matrix.python-version }}/conda_path.txt python -V 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt - name: Install ESMValTool - shell: bash -l {0} run: mamba install esmvaltool 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/install.txt - name: Verify installation - shell: bash -l {0} run: | esmvaltool --help esmvaltool version 2>&1 | tee conda_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt @@ -65,31 +66,20 @@ jobs: # python-version: ${{ matrix.python-version }} # miniconda-version: "latest" # channels: conda-forge -# - shell: bash -l {0} -# run: mkdir -p conda_install_osx_artifacts_python_${{ matrix.python-version }} -# - shell: bash -l {0} -# run: conda --version 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt -# - shell: bash -l {0} -# run: which conda 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/conda_path.txt -# - shell: bash -l {0} -# run: python -V 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt +# - run: mkdir -p conda_install_osx_artifacts_python_${{ matrix.python-version }} +# - run: conda --version 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt +# - run: which conda 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/conda_path.txt +# - run: python -V 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt # # ncurses needs to be from 
conda-forge and not main channel # # for now it's turned off since we're not testing R/Julia installs -# # - shell: bash -l {0} -# # run: conda uninstall -y ncurses -# # - shell: bash -l {0} -# # run: conda list ncurses -# # - shell: bash -l {0} -# # run: conda install -y conda-forge::ncurses -# # - shell: bash -l {0} -# # run: conda list ncurses -# - shell: bash -l {0} -# #run: conda install esmvaltool --no-update-deps 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt -# run: conda install esmvaltool-python esmvaltool-ncl 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt -# - shell: bash -l {0} -# run: esmvaltool --help -# - shell: bash -l {0} -# run: esmvaltool version 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt +# # - run: conda uninstall -y ncurses +# # - run: conda list ncurses +# # - run: conda install -y conda-forge::ncurses +# # - run: conda list ncurses +# - run: conda install esmvaltool --no-update-deps 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt +# - run: conda install esmvaltool-python esmvaltool-ncl 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt +# - run: esmvaltool --help +# - run: esmvaltool version 2>&1 | tee conda_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt # - name: Upload artifacts # if: ${{ always() }} # upload artifacts even if fail # uses: actions/upload-artifact@v2 diff --git a/.github/workflows/install-from-condalock-file.yml b/.github/workflows/install-from-condalock-file.yml index 99092069d8..a209c06f32 100644 --- a/.github/workflows/install-from-condalock-file.yml +++ b/.github/workflows/install-from-condalock-file.yml @@ -20,6 +20,11 @@ on: schedule: - cron: '0 0 * * *' +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: linux: runs-on: "ubuntu-latest" 
@@ -38,68 +43,21 @@ jobs: python-version: ${{ matrix.python-version }} miniconda-version: "latest" channels: conda-forge - - shell: bash -l {0} - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} - - shell: bash -l {0} - run: conda --version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt - - shell: bash -l {0} - run: which python - - shell: bash -l {0} - run: python -V 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt - - shell: bash -l {0} - run: conda create --name esmvaltool-fromlock --file conda-linux-64.lock - - shell: bash -l {0} - run: which python - - shell: bash -l {0} - run: pip --version - - shell: bash -l {0} - run: pip install -e .[develop] - - shell: bash -l {0} - run: esmvaltool --help - - shell: bash -l {0} - run: esmvaltool version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt - - shell: bash -l {0} - run: flake8 - - shell: bash -l {0} - run: pytest -n 2 -m "not installation" + - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} + - run: conda --version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt + - run: which python + - run: python -V 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt + - run: conda create --name esmvaltool-fromlock --file conda-linux-64.lock + - run: which python + - run: pip --version + - run: pip install -e .[develop] + - run: esmvaltool --help + - run: esmvaltool version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt + - run: flake8 + - run: pytest -n 2 -m "not installation" - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail uses: actions/upload-artifact@v2 with: name: Source_Install_Linux_python_${{ matrix.python-version }} path: 
source_install_linux_artifacts_python_${{ matrix.python-version }} - -# osx: -# runs-on: "macos-latest" -# strategy: -# matrix: -# python-version: ["3.9", "3.10", "3.11"] -# fail-fast: false -# name: OSX Python ${{ matrix.python-version }} -# steps: -# - uses: actions/checkout@v2 -# - uses: conda-incubator/setup-miniconda@v2 -# with: -# activate-environment: esmvaltool -# environment-file: environment.yml -# python-version: ${{ matrix.python-version }} -# miniconda-version: "latest" -# channels: conda-forge -# - shell: bash -l {0} -# run: mkdir -p source_install_osx_artifacts_python_${{ matrix.python-version }} -# - shell: bash -l {0} -# run: conda --version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt -# - shell: bash -l {0} -# run: python -V 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt -# - shell: bash -l {0} -# run: pip install -e .[develop] 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt -# - shell: bash -l {0} -# run: esmvaltool --help -# - shell: bash -l {0} -# run: esmvaltool version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt -# - name: Upload artifacts -# if: ${{ always() }} # upload artifacts even if fail -# uses: actions/upload-artifact@v2 -# with: -# name: Source_Install_OSX_python_${{ matrix.python-version }} -# path: source_install_osx_artifacts_python_${{ matrix.python-version }} diff --git a/.github/workflows/install-from-source.yml b/.github/workflows/install-from-source.yml index 8c86e273ac..2e24b8f049 100644 --- a/.github/workflows/install-from-source.yml +++ b/.github/workflows/install-from-source.yml @@ -9,6 +9,11 @@ on: schedule: - cron: '0 0 * * *' +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: linux: runs-on: "ubuntu-latest" @@ -29,18 +34,14 @@ jobs: miniforge-version: "latest" 
miniforge-variant: Mambaforge use-mamba: true - - shell: bash -l {0} - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt python -V 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt - name: Install - shell: bash -l {0} run: pip install -e .[develop] 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/install.txt - name: Verify installation - shell: bash -l {0} run: | esmvaltool --help esmvaltool version 2>&1 | tee source_install_linux_artifacts_python_${{ matrix.python-version }}/version.txt @@ -70,18 +71,12 @@ jobs: # python-version: ${{ matrix.python-version }} # miniconda-version: "latest" # channels: conda-forge -# - shell: bash -l {0} -# run: mkdir -p source_install_osx_artifacts_python_${{ matrix.python-version }} -# - shell: bash -l {0} -# run: conda --version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt -# - shell: bash -l {0} -# run: python -V 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt -# - shell: bash -l {0} -# run: pip install -e .[develop] 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt -# - shell: bash -l {0} -# run: esmvaltool --help -# - shell: bash -l {0} -# run: esmvaltool version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt +# - run: mkdir -p source_install_osx_artifacts_python_${{ matrix.python-version }} +# - run: conda --version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt +# - run: python -V 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version 
}}/python_version.txt +# - run: pip install -e .[develop] 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/install.txt +# - run: esmvaltool --help +# - run: esmvaltool version 2>&1 | tee source_install_osx_artifacts_python_${{ matrix.python-version }}/version.txt # - name: Upload artifacts # if: ${{ always() }} # upload artifacts even if fail # uses: actions/upload-artifact@v2 diff --git a/.github/workflows/run-tests-monitor.yml b/.github/workflows/run-tests-monitor.yml index e23c7c0daf..1efe54a66a 100644 --- a/.github/workflows/run-tests-monitor.yml +++ b/.github/workflows/run-tests-monitor.yml @@ -12,6 +12,11 @@ on: schedule: - cron: '0 0 * * *' # nightly +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: linux: runs-on: "ubuntu-latest" @@ -32,29 +37,22 @@ jobs: miniforge-version: "latest" miniforge-variant: Mambaforge use-mamba: true - - shell: bash -l {0} - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt python -V 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt - name: Install pytest-monitor - shell: bash -l {0} run: pip install pytest-monitor - name: Install ESMValTool - shell: bash -l {0} run: pip install -e .[develop] 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/install.txt - name: Install Julia dependencies - shell: bash -l {0} run: esmvaltool install Julia - name: Run tests - shell: bash -l {0} run: > pytest -n 2 -m "not installation" --db ../.pymon 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Parse monitor information - shell: bash -l {0} run: python tests/parse_pymon.py - name: Upload artifacts if: ${{ always() 
}} # upload artifacts even if fail @@ -84,28 +82,22 @@ jobs: use-mamba: true # - name: Install libomp with homebrew # run: brew install libomp - - shell: bash -l {0} - run: mkdir -p test_osx_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p test_osx_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt python -V 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt - name: Install pytest-monitor - shell: bash -l {0} run: pip install pytest-monitor - name: Install ESMValTool - shell: bash -l {0} run: > pip install -e .[develop] 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/install.txt - name: Run tests - shell: bash -l {0} run: > pytest -n 2 -m "not installation" --db ../.pymon 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Parse monitor information - shell: bash -l {0} run: python tests/parse_pymon.py - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail diff --git a/.github/workflows/test-development.yml b/.github/workflows/test-development.yml index 2f115ed46e..cab6489548 100644 --- a/.github/workflows/test-development.yml +++ b/.github/workflows/test-development.yml @@ -12,9 +12,15 @@ on: push: branches: - main + - fix_recipe_filler_bkwds_incompatibility schedule: - cron: '0 0 * * *' +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: linux: runs-on: "ubuntu-latest" @@ -35,31 +41,24 @@ jobs: miniforge-version: "latest" miniforge-variant: Mambaforge use-mamba: true - - shell: bash -l {0} - run: mkdir -p develop_test_linux_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p develop_test_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee 
develop_test_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt python -V 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt - name: Install ESMValTool - shell: bash -l {0} run: pip install -e .[develop] 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/install.txt - name: Install Julia dependencies - shell: bash -l {0} run: esmvaltool install Julia - name: Install development version of ESMValCore - shell: bash -l {0} run: | cd .. git clone https://github.com/ESMValGroup/ESMValCore.git cd ESMValCore pip install -e .[develop] - name: Run flake8 - shell: bash -l {0} run: flake8 - name: Run tests - shell: bash -l {0} run: pytest -n 2 -m "not installation" 2>&1 | tee develop_test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9cb266eaed..9eec648279 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,9 +6,15 @@ on: push: branches: - main + - fix_recipe_filler_bkwds_incompatibility schedule: - cron: '0 0 * * *' +# Required shell entrypoint to have properly configured bash shell +defaults: + run: + shell: bash -l {0} + jobs: linux: runs-on: "ubuntu-latest" @@ -29,36 +35,28 @@ jobs: miniforge-version: "latest" miniforge-variant: Mambaforge use-mamba: true - - shell: bash -l {0} - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/conda_version.txt python -V 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/python_version.txt # this is how to export variables to the GITHUB var environment echo "pver0=$(python -V)" >> $GITHUB_ENV - name: Install 
ESMValTool - shell: bash -l {0} run: pip install -e .[develop] 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/install.txt - name: Install Julia dependencies - shell: bash -l {0} run: esmvaltool install Julia - name: Export Python minor version - shell: bash -l {0} run: echo "pver1=$(python -V)" >> $GITHUB_ENV - name: Exit if Python minor version changed - shell: bash -l {0} if: ${{ env.pver1 != env.pver0}} run: | echo "Python minor version changed after Julia install" python -V exit 1 - name: Run flake8 - shell: bash -l {0} run: flake8 - name: Run tests - shell: bash -l {0} run: pytest -n 2 -m "not installation" 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail @@ -88,21 +86,16 @@ jobs: use-mamba: true # - name: Install libomp with homebrew # run: brew install libomp - - shell: bash -l {0} - run: mkdir -p test_osx_artifacts_python_${{ matrix.python-version }} + - run: mkdir -p test_osx_artifacts_python_${{ matrix.python-version }} - name: Record versions - shell: bash -l {0} run: | mamba --version 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/conda_version.txt python -V 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/python_version.txt - name: Install ESMValTool - shell: bash -l {0} run: pip install -e .[develop] 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/install.txt - name: Run flake8 - shell: bash -l {0} run: flake8 - name: Run tests - shell: bash -l {0} run: pytest -n 2 -m "not installation" 2>&1 | tee test_osx_artifacts_python_${{ matrix.python-version }}/test_report.txt - name: Upload artifacts if: ${{ always() }} # upload artifacts even if fail diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 68b849dac3..071686d373 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -9,10 +9,18 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "mambaforge-4.10" + # 
updated and deployed from Aug 1, 2023 + python: "mambaforge-22.9" jobs: + pre_create_environment: + # update mamba just in case + - mamba update --yes --quiet --name=base mamba 'zstd=1.5.2' + - mamba --version + - mamba list --name=base post_create_environment: + - conda run -n ${CONDA_DEFAULT_ENV} mamba list # use conda run executable wrapper to have all env variables + - conda run -n ${CONDA_DEFAULT_ENV} mamba --version - conda run -n ${CONDA_DEFAULT_ENV} pip install . --no-deps # Declare the requirements required to build your docs diff --git a/.zenodo.json b/.zenodo.json index 083fa51356..c6a731981f 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -1,382 +1,426 @@ { - "creators": [ - { - "affiliation": "NLeSC, Netherlands", - "name": "Andela, Bouwe", - "orcid": "0000-0001-9005-8940" - }, - { - "affiliation": "DLR, Germany", - "name": "Broetz, Bjoern" - }, - { - "affiliation": "PML, UK", - "name": "de Mora, Lee", - "orcid": "0000-0002-5080-3149" - }, - { - "affiliation": "NLeSC, Netherlands", - "name": "Drost, Niels", - "orcid": "0000-0001-9795-7981" - }, - { - "affiliation": "DLR, Germany", - "name": "Eyring, Veronika", - "orcid": "0000-0002-6887-4885" - }, - { - "affiliation": "AWI, Germany", - "name": "Koldunov, Nikolay", - "orcid": "0000-0002-3365-8146" - }, - { - "affiliation": "DLR, Germany", - "name": "Lauer, Axel", - "orcid": "0000-0002-9270-1044" - }, - { - "affiliation": "LMU, Germany", - "name": "Mueller, Benjamin" - }, - { - "affiliation": "URead, UK", - "name": "Predoi, Valeriu", - "orcid": "0000-0002-9729-6578" - }, - { - "affiliation": "DLR, Germany", - "name": "Righi, Mattia", - "orcid": "0000-0003-3827-5950" - }, - { - "affiliation": "DLR, Germany", - "name": "Schlund, Manuel", - "orcid": "0000-0001-5251-0158" - }, - { - "affiliation": "BSC, Spain", - "name": "Vegas-Regidor, Javier", - "orcid": "0000-0003-0096-4291" - }, - { - "affiliation": "SMHI, Sweden", - "name": "Zimmermann, Klaus" - }, - { - "affiliation": "University of Bremen, Germany", - 
"name": "Adeniyi, Kemisola" - }, - { - "affiliation": "ISAC-CNR, Italy", - "name": "Arnone, Enrico", - "orcid": "0000-0001-6740-5051" - }, - { - "affiliation": "BSC, Spain", - "name": "Bellprat, Omar", - "orcid": "0000-0001-6434-1793" - }, - { - "affiliation": "SMHI, Sweden", - "name": "Berg, Peter", - "orcid": "0000-0002-1469-2568" - }, - { - "affiliation": "DLR, Germany", - "name": "Bock, Lisa", - "orcid": "0000-0001-7058-5938" - }, - { - "affiliation": "BSC, Spain", - "name": "Caron, Louis-Philippe", - "orcid": "0000-0001-5221-0147" - }, - { - "affiliation": "MPI for Biogeochemistry, Germany", - "name": "Carvalhais, Nuno" - }, - { - "affiliation": "ENEA, Italy", - "name": "Cionni, Irene", - "orcid": "0000-0002-0591-9193" - }, - { - "affiliation": "BSC, Spain", - "name": "Cortesi, Nicola", - "orcid": "0000-0002-1442-9225" - }, - { - "affiliation": "ISAC-CNR, Italy", - "name": "Corti, Susanna" - }, - { - "affiliation": "ETH Zurich, Switzerland", - "name": "Crezee, Bas", - "orcid": "0000-0002-1774-1126" - }, - { - "affiliation": "ETH Zurich, Switzerland", - "name": "Davin, Edouard Leopold", - "orcid": "0000-0003-3322-9330" - }, - { - "affiliation": "ISAC-CNR, Italy", - "name": "Davini, Paolo", - "orcid": "0000-0003-3389-7849" - }, - { - "affiliation": "NCAR, USA", - "name": "Deser, Clara" - }, - { - "affiliation": "NLeSC, Netherlands", - "name": "Diblen, Faruk" - }, - { - "affiliation": "UCLouvain, Belgium", - "name": "Docquier, David" - }, - { - "affiliation": "MetOffice, UK", - "name": "Dreyer, Laura" - }, - { - "affiliation": "DKRZ, Germany", - "name": "Ehbrecht, Carsten" - }, - { - "affiliation": "MetOffice, UK", - "name": "Earnshaw, Paul" - }, - { - "affiliation": "University of Bremen, Germany", - "name": "Gier, Bettina" - }, - { - "affiliation": "BSC, Spain", - "name": "Gonzalez-Reviriego, Nube", - "orcid": "0000-0002-5919-6701" - }, - { - "affiliation": "University of Arizona, USA", - "name": "Goodman, Paul" - }, - { - "affiliation": "HZG, Germany", - 
"name": "Hagemann, Stefan", - "orcid": "0000-0001-5444-2945" - }, - { - "affiliation": "ISAC-CNR, Italy", - "name": "von Hardenberg, Jost", - "orcid": "0000-0002-5312-8070" - }, - { - "affiliation": "DLR, Germany", - "name": "Hassler, Birgit", - "orcid": "0000-0003-2724-709X" - }, - { - "affiliation": "BSC, Spain", - "name": "Hunter, Alasdair", - "orcid": "0000-0001-8365-3709" - }, - { - "affiliation": "FUB, Germany", - "name": "Kadow, Christopher" - }, - { - "affiliation": "DKRZ, Germany", - "name": "Kindermann, Stephan", - "orcid": "0000-0001-9335-1093" - }, - { - "affiliation": "MPI for Biogeochemistry, Germany", - "name": "Koirala, Sujan" - }, - { - "affiliation": "BSC, Spain", - "name": "Lledó, Llorenç" - }, - { - "affiliation": "ETH Zurich, Switzerland", - "name": "Lejeune, Quentin" - }, - { - "affiliation": "University of Hamburg, German", - "name": "Lembo, Valerio", - "orcid": "0000-0001-6085-5914" - }, - { - "affiliation": "MetOffice, UK", - "name": "Little, Bill" - }, - { - "affiliation": "BSC, Spain", - "name": "Loosveldt-Tomas, Saskia" - }, - { - "affiliation": "ETH Zurich, Switzerland", - "name": "Lorenz, Ruth", - "orcid": "0000-0002-3986-1268" - }, - { - "affiliation": "CMCC, Italy", - "name": "Lovato, Tomas", - "orcid": "0000-0002-5188-6767" - }, - { - "affiliation": "University of Hamburg, German", - "name": "Lucarini, Valerio" - }, - { - "affiliation": "UCLouvain, Belgium", - "name": "Massonnet, François" - }, - { - "affiliation": "NIBIO, Norway", - "name": "Mohr, Christian Wilhelm", - "orcid": "0000-0003-2656-1802" - }, - { - "affiliation": "BSC, Spain", - "name": "Moreno-Chamarro, Eduardo" - }, - { - "affiliation": "University of Arizona, USA", - "name": "Amarjiit, Pandde" - }, - { - "affiliation": "BSC, Spain", - "name": "Pérez-Zanón, Núria" - }, - { - "affiliation": "NCAR, USA", - "name": "Phillips, Adam", - "orcid": "0000-0003-4859-8585" - }, - { - "affiliation": "University of Arizona, USA", - "name": "Russell, Joellen" - }, - { - 
"affiliation": "CICERO, Norway", - "name": "Sandstad, Marit" - }, - { - "affiliation": "MetOffice, UK", - "name": "Sellar, Alistair" - }, - { - "affiliation": "DLR, Germany", - "name": "Senftleben, Daniel" - }, - { - "affiliation": "ISMAR-CNR, Italy", - "name": "Serva, Federico", - "orcid": "0000-0002-7118-0817" - }, - { - "affiliation": "CICERO, Norway", - "name": "Sillmann, Jana" - }, - { - "affiliation": "MPI-M, Germany", - "name": "Stacke, Tobias", - "orcid": "0000-0003-4637-5337" - }, - { - "affiliation": "URead, UK", - "name": "Swaminathan, Ranjini", - "orcid": "0000-0001-5853-2673" - }, - { - "affiliation": "BSC, Spain", - "name": "Torralba, Verónica" - }, - { - "affiliation": "University of Bremen, Germany", - "name": "Weigel, Katja", - "orcid": "0000-0001-6133-7801" - }, - { - "affiliation": "University of Reading, UK", - "name": "Roberts, Charles", - "orcid": "0000-0002-1147-8961" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Kalverla, Peter", - "orcid": "0000-0002-5025-7862" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Alidoost, Sarah", - "orcid": "0000-0001-8407-6472" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Verhoeven, Stefan", - "orcid": "0000-0002-5821-2060" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Vreede, Barbara", - "orcid": "0000-0002-5023-4601" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Smeets, Stef", - "orcid": "0000-0002-5413-9038" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Soares Siqueira, Abel", - "orcid": "0000-0003-4451-281X" - }, - { - "affiliation": "DLR, Germany", - "name": "Kazeroni, Rémi", - "orcid": "0000-0001-7205-9528" - } - ], - "description": "ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP.", - "license": { - "id": "Apache-2.0" - }, - "title": "ESMValTool", - "communities": [ - { - "identifier": "is-enes3" - }, - { 
- "identifier": "dlr_de" - }, - { - "identifier": "ecfunded" - }, - { - "identifier": "nlesc" - } - ], - "grants": [ - { - "id": "10.13039/501100000780::282672" - }, - { - "id": "10.13039/501100000780::641727" - }, - { - "id": "10.13039/501100000780::641816" - }, - { - "id": "10.13039/501100000780::727862" - }, - { - "id": "10.13039/501100000780::776613" - }, - { - "id": "10.13039/501100000780::824084" - } - ] + "creators": [ + { + "affiliation": "NLeSC, Netherlands", + "name": "Andela, Bouwe", + "orcid": "0000-0001-9005-8940" + }, + { + "affiliation": "DLR, Germany", + "name": "Broetz, Bjoern" + }, + { + "affiliation": "PML, UK", + "name": "de Mora, Lee", + "orcid": "0000-0002-5080-3149" + }, + { + "affiliation": "NLeSC, Netherlands", + "name": "Drost, Niels", + "orcid": "0000-0001-9795-7981" + }, + { + "affiliation": "DLR, Germany", + "name": "Eyring, Veronika", + "orcid": "0000-0002-6887-4885" + }, + { + "affiliation": "AWI, Germany", + "name": "Koldunov, Nikolay", + "orcid": "0000-0002-3365-8146" + }, + { + "affiliation": "DLR, Germany", + "name": "Lauer, Axel", + "orcid": "0000-0002-9270-1044" + }, + { + "affiliation": "LMU, Germany", + "name": "Mueller, Benjamin" + }, + { + "affiliation": "URead, UK", + "name": "Predoi, Valeriu", + "orcid": "0000-0002-9729-6578" + }, + { + "affiliation": "DLR, Germany", + "name": "Righi, Mattia", + "orcid": "0000-0003-3827-5950" + }, + { + "affiliation": "DLR, Germany", + "name": "Schlund, Manuel", + "orcid": "0000-0001-5251-0158" + }, + { + "affiliation": "BSC, Spain", + "name": "Vegas-Regidor, Javier", + "orcid": "0000-0003-0096-4291" + }, + { + "affiliation": "SMHI, Sweden", + "name": "Zimmermann, Klaus" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Adeniyi, Kemisola" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "Arnone, Enrico", + "orcid": "0000-0001-6740-5051" + }, + { + "affiliation": "BSC, Spain", + "name": "Bellprat, Omar", + "orcid": "0000-0001-6434-1793" + }, + { + "affiliation": 
"SMHI, Sweden", + "name": "Berg, Peter", + "orcid": "0000-0002-1469-2568" + }, + { + "affiliation": "DLR, Germany", + "name": "Bock, Lisa", + "orcid": "0000-0001-7058-5938" + }, + { + "affiliation": "MetOffice, UK", + "name": "Bodas-Salcedo, Alejandro", + "orcid": "0000-0002-7890-2536" + }, + { + "affiliation": "BSC, Spain", + "name": "Caron, Louis-Philippe", + "orcid": "0000-0001-5221-0147" + }, + { + "affiliation": "MPI for Biogeochemistry, Germany", + "name": "Carvalhais, Nuno" + }, + { + "affiliation": "ENEA, Italy", + "name": "Cionni, Irene", + "orcid": "0000-0002-0591-9193" + }, + { + "affiliation": "BSC, Spain", + "name": "Cortesi, Nicola", + "orcid": "0000-0002-1442-9225" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "Corti, Susanna" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Crezee, Bas", + "orcid": "0000-0002-1774-1126" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Davin, Edouard Leopold", + "orcid": "0000-0003-3322-9330" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "Davini, Paolo", + "orcid": "0000-0003-3389-7849" + }, + { + "affiliation": "NCAR, USA", + "name": "Deser, Clara" + }, + { + "affiliation": "NLeSC, Netherlands", + "name": "Diblen, Faruk" + }, + { + "affiliation": "UCLouvain, Belgium", + "name": "Docquier, David" + }, + { + "affiliation": "MetOffice, UK", + "name": "Dreyer, Laura" + }, + { + "affiliation": "DKRZ, Germany", + "name": "Ehbrecht, Carsten" + }, + { + "affiliation": "MetOffice, UK", + "name": "Earnshaw, Paul" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Gier, Bettina" + }, + { + "affiliation": "BSC, Spain", + "name": "Gonzalez-Reviriego, Nube", + "orcid": "0000-0002-5919-6701" + }, + { + "affiliation": "University of Arizona, USA", + "name": "Goodman, Paul" + }, + { + "affiliation": "HZG, Germany", + "name": "Hagemann, Stefan", + "orcid": "0000-0001-5444-2945" + }, + { + "affiliation": "ISAC-CNR, Italy", + "name": "von Hardenberg, Jost", + "orcid": 
"0000-0002-5312-8070" + }, + { + "affiliation": "DLR, Germany", + "name": "Hassler, Birgit", + "orcid": "0000-0003-2724-709X" + }, + { + "affiliation": "DLR, Germany", + "name": "Heuer, Helge", + "orcid": "0000-0003-2411-7150" + }, + { + "affiliation": "BSC, Spain", + "name": "Hunter, Alasdair", + "orcid": "0000-0001-8365-3709" + }, + { + "affiliation": "FUB, Germany", + "name": "Kadow, Christopher" + }, + { + "affiliation": "DKRZ, Germany", + "name": "Kindermann, Stephan", + "orcid": "0000-0001-9335-1093" + }, + { + "affiliation": "MPI for Biogeochemistry, Germany", + "name": "Koirala, Sujan" + }, + { + "affiliation": "DLR, Germany", + "name": "Kuehbacher, Birgit" + }, + { + "affiliation": "BSC, Spain", + "name": "Lledó, Llorenç" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Lejeune, Quentin" + }, + { + "affiliation": "University of Hamburg, German", + "name": "Lembo, Valerio", + "orcid": "0000-0001-6085-5914" + }, + { + "affiliation": "MetOffice, UK", + "name": "Little, Bill" + }, + { + "affiliation": "BSC, Spain", + "name": "Loosveldt-Tomas, Saskia" + }, + { + "affiliation": "ETH Zurich, Switzerland", + "name": "Lorenz, Ruth", + "orcid": "0000-0002-3986-1268" + }, + { + "affiliation": "CMCC, Italy", + "name": "Lovato, Tomas", + "orcid": "0000-0002-5188-6767" + }, + { + "affiliation": "University of Hamburg, German", + "name": "Lucarini, Valerio" + }, + { + "affiliation": "UCLouvain, Belgium", + "name": "Massonnet, François" + }, + { + "affiliation": "NIBIO, Norway", + "name": "Mohr, Christian Wilhelm", + "orcid": "0000-0003-2656-1802" + }, + { + "affiliation": "University of Arizona, USA", + "name": "Amarjiit, Pandde" + }, + { + "affiliation": "BSC, Spain", + "name": "Pérez-Zanón, Núria" + }, + { + "affiliation": "NCAR, USA", + "name": "Phillips, Adam", + "orcid": "0000-0003-4859-8585" + }, + { + "affiliation": "University of Arizona, USA", + "name": "Russell, Joellen" + }, + { + "affiliation": "CICERO, Norway", + "name": "Sandstad, Marit" + }, 
+ { + "affiliation": "MetOffice, UK", + "name": "Sellar, Alistair" + }, + { + "affiliation": "DLR, Germany", + "name": "Senftleben, Daniel" + }, + { + "affiliation": "ISMAR-CNR, Italy", + "name": "Serva, Federico", + "orcid": "0000-0002-7118-0817" + }, + { + "affiliation": "CICERO, Norway", + "name": "Sillmann, Jana" + }, + { + "affiliation": "MPI-M, Germany", + "name": "Stacke, Tobias", + "orcid": "0000-0003-4637-5337" + }, + { + "affiliation": "URead, UK", + "name": "Swaminathan, Ranjini", + "orcid": "0000-0001-5853-2673" + }, + { + "affiliation": "BSC, Spain", + "name": "Torralba, Verónica" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Weigel, Katja", + "orcid": "0000-0001-6133-7801" + }, + { + "affiliation": "DLR, Germany", + "name": "Sarauer, Ellen" + }, + { + "affiliation": "University of Reading, UK", + "name": "Roberts, Charles", + "orcid": "0000-0002-1147-8961" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Kalverla, Peter", + "orcid": "0000-0002-5025-7862" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Alidoost, Sarah", + "orcid": "0000-0001-8407-6472" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Verhoeven, Stefan", + "orcid": "0000-0002-5821-2060" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Vreede, Barbara", + "orcid": "0000-0002-5023-4601" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Smeets, Stef", + "orcid": "0000-0002-5413-9038" + }, + { + "affiliation": "Netherlands eScience Center", + "name": "Soares Siqueira, Abel", + "orcid": "0000-0003-4451-281X" + }, + { + "affiliation": "DLR, Germany", + "name": "Kazeroni, Rémi", + "orcid": "0000-0001-7205-9528" + }, + { + "affiliation": "NASA, USA", + "name": "Potter, Jerry" + }, + { + "affiliation": "DLR, Germany", + "name": "Winterstein, Franziska", + "orcid": "0000-0002-2406-4936" + }, + { + "affiliation": "ACCESS-NRI, Australia", + "name": "Beucher, Romain", + "orcid": 
"0000-0003-3891-5444" + }, + { + "affiliation": "DLR, Germany", + "name": "Kraft, Jeremy" + }, + { + "affiliation": "University of Bremen, Germany", + "name": "Ruhe, Lukas", + "orcid": "0000-0001-6349-9118" + }, + { + "affiliation": "DLR, Germany", + "name": "Bonnet, Pauline", + "orcid": "0000-0003-3780-0784" + } + ], + "description": "ESMValTool: A community diagnostic and performance metrics tool for routine evaluation of Earth system models in CMIP.", + "license": { + "id": "Apache-2.0" + }, + "publication_date": "2023-07-06", + "title": "ESMValTool", + "version": "v2.9.0", + "communities": [ + { + "identifier": "is-enes3" + }, + { + "identifier": "dlr_de" + }, + { + "identifier": "ecfunded" + }, + { + "identifier": "nlesc" + } + ], + "grants": [ + { + "id": "10.13039/501100000780::282672" + }, + { + "id": "10.13039/501100000780::641727" + }, + { + "id": "10.13039/501100000780::641816" + }, + { + "id": "10.13039/501100000780::727862" + }, + { + "id": "10.13039/501100000780::776613" + }, + { + "id": "10.13039/501100000780::824084" + } + ] } diff --git a/CITATION.cff b/CITATION.cff index f069250a3d..147e3acd17 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -90,6 +90,11 @@ authors: family-names: Bock given-names: Lisa orcid: "https://orcid.org/0000-0001-7058-5938" + - + affiliation: "MetOffice, UK" + family-names: Bodas-Salcedo + given-names: Alejandro + orcid: "https://orcid.org/0000-0002-7890-2536" - affiliation: "BSC, Spain" family-names: Caron @@ -181,6 +186,11 @@ authors: family-names: Hassler given-names: Birgit orcid: "https://orcid.org/0000-0003-2724-709X" + - + affiliation: "DLR, Germany" + family-names: Heuer + given-names: Helge + orcid: "https://orcid.org/0000-0003-2411-7150" - affiliation: "BSC, Spain" family-names: Hunter @@ -199,6 +209,10 @@ authors: affiliation: "MPI for Biogeochemistry, Germany" family-names: Koirala given-names: Sujan + - + affiliation: "DLR, Germany" + family-names: Kuehbacher + given-names: Birgit - affiliation: "BSC, Spain" 
family-names: Lledó @@ -300,6 +314,10 @@ authors: family-names: Weigel given-names: Katja orcid: "https://orcid.org/0000-0001-6133-7801" + - + affiliation: "DLR, Germany" + family-names: Sarauer + given-names: Ellen - affiliation: "University of Reading, UK" family-names: Roberts @@ -349,13 +367,32 @@ authors: family-names: Winterstein given-names: Franziska orcid: "https://orcid.org/0000-0002-2406-4936" + - + affiliation: "ACCESS-NRI, Australia" + family-names: Beucher + given-names: Romain + orcid: "https://orcid.org/0000-0003-3891-5444" + - + affiliation: "DLR, Germany" + family-names: Kraft + given-names: Jeremy + - + affiliation: "University of Bremen, Germany" + family-names: Ruhe + given-names: Lukas + orcid: "https://orcid.org/0000-0001-6349-9118" + - + affiliation: "DLR, Germany" + family-names: Bonnet + given-names: Pauline + orcid: "https://orcid.org/0000-0003-3780-0784" cff-version: 1.2.0 -date-released: 2023-03-28 +date-released: 2023-07-06 doi: "10.5281/zenodo.3401363" license: "Apache-2.0" message: "If you use this software, please cite it using these metadata." repository-code: "https://github.com/ESMValGroup/ESMValTool/" title: ESMValTool -version: "v2.8.0" +version: "v2.9.0" ... 
diff --git a/README.md b/README.md index d8b11698dd..4fbe8aa84e 100644 --- a/README.md +++ b/README.md @@ -2,20 +2,20 @@ [![made-with-python](https://img.shields.io/badge/Made%20with-Python-1f425f.svg)](https://www.python.org/) [![Documentation Status](https://readthedocs.org/projects/esmvaltool/badge/?version=latest)](https://esmvaltool.readthedocs.io/en/latest/?badge=latest) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.3401363.svg)](https://doi.org/10.5281/zenodo.3401363) -[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/ESMValGroup?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Chat on Matrix](https://matrix.to/img/matrix-badge.svg)](https://matrix.to/#/#ESMValGroup_Lobby:gitter.im) [![CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool/tree/main.svg?style=svg)](https://circleci.com/gh/ESMValGroup/ESMValTool/tree/main) [![Test in Full Development Mode](https://github.com/ESMValGroup/ESMValTool/actions/workflows/test-development.yml/badge.svg)](https://github.com/ESMValGroup/ESMValTool/actions/workflows/test-development.yml) [![Codacy Badge](https://app.codacy.com/project/badge/Coverage/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/gh/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Coverage) [![Codacy Badge](https://app.codacy.com/project/badge/Grade/79bf6932c2e844eea15d0fb1ed7e415c)](https://www.codacy.com/gh/ESMValGroup/ESMValTool?utm_source=github.com&utm_medium=referral&utm_content=ESMValGroup/ESMValTool&utm_campaign=Badge_Grade) [![Docker Build Status](https://img.shields.io/docker/cloud/build/esmvalgroup/esmvaltool.svg)](https://hub.docker.com/r/esmvalgroup/esmvaltool/) -[![Anaconda-Server Badge](https://anaconda.org/conda-forge/esmvaltool/badges/version.svg)](https://anaconda.org/conda-forge/esmvaltool) +[![Anaconda-Server 
Badge](https://img.shields.io/conda/vn/conda-forge/ESMValTool?color=blue&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/esmvaltool) ![stand with Ukraine](https://badgen.net/badge/stand%20with/UKRAINE/?color=0057B8&labelColor=FFD700) ![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValTool/main/doc/sphinx/source/figures/ESMValTool-logo-2.png) - [**Documentation**](https://docs.esmvaltool.org/en/latest/) - [**ESMValTool Website**](https://www.esmvaltool.org/) -- [**ESMValTool Tutorial**](https://esmvalgroup.github.io/ESMValTool_Tutorial/index.html) +- [**ESMValTool Tutorial**](https://tutorial.esmvaltool.org/index.html) - [**ESMValGroup Project on GitHub**](https://github.com/ESMValGroup) - [**Gallery**](https://docs.esmvaltool.org/en/latest/gallery.html) - [**`conda-forge` package feedstock**](https://github.com/conda-forge/esmvaltool-suite-feedstock) @@ -53,11 +53,11 @@ ESMValTool can run with the following types of [data as input](https://docs.esmv # Getting started -Please see [getting started](https://docs.esmvaltool.org/en/latest/quickstart/index.html) on readthedocs as well as [ESMValTool tutorial](https://esmvalgroup.github.io/ESMValTool_Tutorial/index.html). The tutorial is a set of lessons that together teach skills needed to work with ESMValTool in climate-related domains. +Please see [getting started](https://docs.esmvaltool.org/en/latest/quickstart/index.html) on our instance of Read the Docs as well as [ESMValTool tutorial](https://tutorial.esmvaltool.org/index.html). The tutorial is a set of lessons that together teach skills needed to work with ESMValTool in climate-related domains. ## Getting help -The easiest way to get help if you cannot find the answer in the documentation on [readthedocs](https://docs.esmvaltool.org), is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). 
+The easiest way to get help, if you cannot find the answer in the documentation in our [docs](https://docs.esmvaltool.org), is to open an [issue on GitHub](https://github.com/ESMValGroup/ESMValTool/issues). ## Contributing diff --git a/conda-linux-64.lock b/conda-linux-64.lock index e8915dafdc..b0facf9382 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -1,50 +1,46 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: cdee46d10f8bc1b75620e0bfb5babf885dd0f4e4fc49bd6d7ac9f226a3f0c8af +# input_hash: 05ccb0eb21a22bcda85523e4fc91a3c5d43c69b2fc56e17f5c81159b2e906d6b @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/_py-xgboost-mutex-2.0-gpu_0.tar.bz2#7702188077361f43a4d61e64c694f850 https://conda.anaconda.org/conda-forge/noarch/_r-mutex-1.0.1-anacondar_1.tar.bz2#19f9db5f4f1b7f5ef5f6d67207f25f38 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.5.7-hbcca054_0.conda#f5c65075fc34438d5b456c7f3f5ab695 -https://conda.anaconda.org/conda-forge/noarch/cuda-version-11.1-hdbd7af8_2.conda#54dfb03757dc2bcf80740a29f76894f4 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.11.17-hbcca054_0.conda#01ffc8d36f9eba0ce0b3c1955fa780ee +https://conda.anaconda.org/conda-forge/noarch/cuda-version-11.8-h70ddcb2_2.conda#601900ec9ff06f62f76a247148e52c04 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 
-https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_15.tar.bz2#5dd5127afd710f91f6a75821bac0a4f0 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_1.conda#6185f640c43843e5ad6fd1c5372c3f80 +https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-2.6.32-he073ed8_16.conda#7ca122655873935e02c91279c5b03c8c https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda#7aca3059a1729aa76c597603f10b0dd3 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-devel_linux-64-13.1.0-he3cc6c4_0.conda#5ec50dcd74ba7461709c4ac9c4cc4190 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda#afb656a334c409dd9805508af1c89c7a -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-devel_linux-64-13.1.0-he3cc6c4_0.conda#e703914ad2288ab24cf5ac94d812fc11 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda#067bcc23164642f4c226da631f2a2e1d +https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-13.2.0-ha9c7c90_103.conda#db8cd1a871a07404d94f7dcc78c21a61 +https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-13.2.0-ha9c7c90_103.conda#46947f93254fdedc5ae0725b11ca3610 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.2.0-h7e041cc_3.conda#937eaed008f6bf2191c5fe76f87755e9 https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda#d8d7293c5b37f39b2ac32940621c6592 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-3_cp311.conda#c2e2630ddb68cf52eec74dc7dfab20b5 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-4_cp311.conda#d786502c97404c94d7d58d258a445a65 https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda#939e3e74d8be4dac89ce83b20de2492a https://conda.anaconda.org/conda-forge/linux-64/xorg-imake-1.0.7-0.tar.bz2#23acfc5a339a6a34cc2241f64e4111be 
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda#506dc07710dd5b0ba63cbf134897fc10 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda#56ca14d57ac29a75d23a39eb3ee0ddeb -https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_15.tar.bz2#66c192522eacf5bb763568b4e415d133 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.2.0-h807b86a_3.conda#7124cbb46b13d395bdde68f2d215c989 +https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.12-he073ed8_16.conda#071ea8dceff4d30ac511f4a2f8437cd1 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-hf600244_0.conda#33084421a8c0af6aef1b439707f7662a https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda#cd93f779ff018dd85c7544c015c9db3c +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.2.0-h807b86a_3.conda#23fdf1fef05baeb7eadc2aed5fb0011f https://conda.anaconda.org/conda-forge/linux-64/aom-3.5.0-h27087fc_0.tar.bz2#a08150fd2298460cd1fcccf626305642 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.19-hd590300_0.conda#81bd50906818d08c2f98d6d9f94cbd02 -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda#e8c18d865be43e2fb3f7a145b6adf1f5 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.8.23-hd590300_0.conda#cc4f06f7eedb1523f3b83fd0fb3942ff +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-hd590300_5.conda#69b8b6202a07720f448be700e300ccf4 
+https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.23.0-hd590300_0.conda#d459949bc10f64dee1595c176c2e6291 https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.2-h59595ed_0.conda#4336bd67920dd504cd8c6761d6a99645 -https://conda.anaconda.org/conda-forge/linux-64/cudatoolkit-11.1.1-ha002fc5_11.conda#2b11133c35a4899a29fc5109d8f95d2e https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hc118613_107.conda#28b2b46b350ddb6a01d061392f75af54 https://conda.anaconda.org/conda-forge/linux-64/freexl-1.0.6-h166bdaf_1.tar.bz2#897e772a157faf3330d72dd291486f62 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.11.2-hcb278e6_0.conda#3b8e364995e3575e57960d29c1e5ab14 https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2#14947d8770185e5153fdd04d4673ed37 https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2#cddaf2c63ea4a5901cf09524c490ecdc -https://conda.anaconda.org/conda-forge/linux-64/ghostscript-9.54.0-h27087fc_2.tar.bz2#c3b35ac18d09ffc8d46064fb09a696af +https://conda.anaconda.org/conda-forge/linux-64/ghostscript-10.02.1-h59595ed_0.conda#3750ef83be92ff8de6e44da80d509847 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda#96f3b11872ef6fad973eac856cd2624f -https://conda.anaconda.org/conda-forge/linux-64/gmp-6.2.1-h58526e2_0.tar.bz2#b94cf2db16066b242ebd26db2facbd56 +https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-h59595ed_0.conda#0e33ef437202db431aa5a928248cf2e8 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 https://conda.anaconda.org/conda-forge/linux-64/icu-72.1-hcb278e6_0.conda#7c8d20d847bb45f56bd941578fcfa146 
https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf @@ -52,41 +48,40 @@ https://conda.anaconda.org/conda-forge/linux-64/json-c-0.16-hc379101_0.tar.bz2#0 https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-h7f98852_2.tar.bz2#8e787b08fe19986d99d034b839df2961 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20230125.2-cxx17_h59595ed_2.conda#f67106643beadfc737b94ca0bfd6d8e3 -https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda#0f683578378cddb223e7fd24f785ab2a -https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_8.tar.bz2#9194c9bf9428035a05352d031462eae4 +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20230125.3-cxx17_h59595ed_0.conda#d1db1b8be7c3a8983dcbbbfe4f0765de +https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.2-h59595ed_1.conda#127b0be54c1c90760d7fe02ea7a56426 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.0.9-h166bdaf_9.conda#61641e239f96eae2b8492dc7e755828c https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda#6aa9c9de5542ecb07fdda9ca626252d8 -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda#6305a3dd2752c76335295da4e581f2fd https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 
+https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.2.0-ha4646dd_3.conda#c714d905cdfa0e70200f68b80cc04764 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2#b62b52da46c39ee2bc3c162ac7f1804d -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda#1edd9e67bdb90d78cea97733ff6b54e6 -https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2#39b1328babf85c7c3a61636d9cd50206 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 +https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libnuma-2.0.16-h0b41bf4_1.conda#28bfe2cb11357ccc5be21101a6b7ce86 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.23-pthreads_h80387f5_0.conda#9c5ea51ccb8ffae7d06c645869d24ce6 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.23-pthreads_h5c82d6a_0.conda#14e3fb938a49ea46df201eb105bb9068 -https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-13.1.0-hfd8a6a1_0.conda#7594fd17fb4d1b8b0e47a6b306fe01ae +https://conda.anaconda.org/conda-forge/linux-64/libopenlibm4-0.8.1-hd590300_1.conda#e6af610e01d04927a5060c95ce4e0875 +https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-13.2.0-h7e041cc_3.conda#c63848839569bb82a3eff11f01e5de00 https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.18-h36c2ea0_1.tar.bz2#c3788462a6fbddafdb413a9f9053e58d https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-h27087fc_0.conda#f204c8ba400ec475452737094fb81d52 https://conda.anaconda.org/conda-forge/linux-64/libunwind-1.6.2-h9c3ff4c_0.tar.bz2#a730b2badd586580c5752cc73842e068 https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2#ede4266dc02e875fe1ea77b25dd43747 
https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.0-h0b41bf4_0.conda#0d4a7508d8c6c65314f2b9c1f56ad408 -https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-h166bdaf_4.tar.bz2#f3f9de449d32ca9b9c66a22863c96f41 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda#82bf6f63eb15ef719b556b63feec3a77 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda#f36c115f1ee199da648e0597ec2047ad https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2#c66fe2d123249af7651ebde8984c51c2 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h516909a_1000.tar.bz2#bb14fcb13341b81d5eb386423b9d2bac https://conda.anaconda.org/conda-forge/linux-64/make-4.3-hd18ef5c_1.tar.bz2#4049ebfd3190b580dffe76daed26155a https://conda.anaconda.org/conda-forge/linux-64/mbedtls-3.3.0-hcb278e6_0.conda#cc1213f464c357b647cc5dde5cfca881 -https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.0-h58526e2_1006.tar.bz2#d099b812378b1e133c12e3b75167d83a -https://conda.anaconda.org/conda-forge/linux-64/nccl-2.18.1.1-h12f7317_0.conda#27a927b408c246762a9d891a3e6e1c2c -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda#681105bccc2a3f7f1a837d47d39c9179 +https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.1-h59595ed_2.conda#9ba5910c34210e7ad60736d172bbcd4c +https://conda.anaconda.org/conda-forge/linux-64/nccl-2.19.4.1-h6103f9b_0.conda#2946f0e841f1f0be90c90bc67877d417 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-h59595ed_2.conda#7dbaa197d7ba6032caf7ae7f32c1efa0 https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 
-https://conda.anaconda.org/conda-forge/linux-64/openlibm-0.8.1-h7f98852_0.tar.bz2#ba0c56761f2380babaa783b2a37b4d08 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.1-hd590300_1.conda#2e1d7b458ac8f1e3ca4e18b77add6277 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.4-hd590300_0.conda#412ba6938c3e2abaca8b1129ea82e238 https://conda.anaconda.org/conda-forge/linux-64/p7zip-16.02-h9c3ff4c_1001.tar.bz2#941066943c0cac69d5aa52189451aa5f -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.42.2-h59595ed_0.conda#700edd63ccd5fc66b70b1c028cea9a68 https://conda.anaconda.org/conda-forge/linux-64/pkg-config-0.29.2-h36c2ea0_1008.tar.bz2#fbef41ff6a4c8140c30057466a1cdd47 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/rdma-core-28.9-h59595ed_1.conda#aeffb7c06b5f65e55e6c637408dc4100 @@ -94,408 +89,411 @@ https://conda.anaconda.org/conda-forge/linux-64/re2-2023.03.02-h8c504da_0.conda# https://conda.anaconda.org/conda-forge/linux-64/sed-4.8-he412f7d_0.tar.bz2#7362f0042e95681f5d371c46c83ebd08 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda#e6d228cd0bb74a51dd18f5bfce0b4115 https://conda.anaconda.org/conda-forge/linux-64/tzcode-2023c-h0b41bf4_0.conda#0c0533894f21c3d35697cb8378d390e2 +https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.7-hcb278e6_1.conda#2c46deb08ba9b10e90d0a6401ad65deb https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2#bcd1b3396ec6960cbc1d2855a9e60b2b https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 https://conda.anaconda.org/conda-forge/linux-64/xorg-makedepend-1.0.8-h59595ed_0.conda#eb9b80b3efdb29ad359dc0438e6755fa https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.1-h0b41bf4_0.conda#e9c3bcf0e0c719431abec8ca447eee27 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.0-h27087fc_3.tar.bz2#0428af0510c3fafedf1c66b43102a34b +https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.0-h59595ed_4.conda#9cfbafab420f42b572f3c032ad59da85 https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.0.7-h0b41bf4_0.conda#49e8329110001f04923fe7e864990b0c -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.5.26-hf677bf3_1.conda#7d00f2b22493e28400fdbea8dc110790 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.16-hbad4bc6_7.conda#d58359b64c6d1256c07eeaee753159e3 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.9-hbad4bc6_2.conda#eee3831810e132b6caf45f03c3428363 
-https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.14-hbad4bc6_7.conda#916c2a86bf786aab811a60990f7538ed +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.0-h93469e0_0.conda#580a52a05f5be28ce00764149017c6d4 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.17-h862ab75_1.conda#0013fcee7acb3cfc801c5929824feb3c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.11-h862ab75_1.conda#6fbc9bd49434eb36d3a59c5020f4af95 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.16-h862ab75_1.conda#f883d61afbc95c50f7b3f62546da4235 https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda#8b9b5aca60558d02ddaa09d599e55920 -https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.1.0-hc4be1a9_0.conda#99d1a8a8ee1665ee9435f8d160df69fe +https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-13.2.0-h338b0a0_3.conda#79ae2d39f23e568b18be949973e9a025 https://conda.anaconda.org/conda-forge/linux-64/glog-0.6.0-h6f12383_0.tar.bz2#b31f3565cb84435407594e548a2fb7b2 https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 https://conda.anaconda.org/conda-forge/linux-64/libavif-0.11.1-h8182462_2.conda#41c399ed4c439e37b844c24ab5621b5a -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-17_linux64_openblas.conda#57fb44770b1bc832fb2dbefa1bd502de -https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_8.tar.bz2#4ae4d7795d33e02bd20f6b23d91caf82 -https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_8.tar.bz2#04bac51ba35ea023dc48af73c1c88c25 +https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.0.9-h166bdaf_9.conda#081aa22f4581c08e4372b0b6c2f8478e +https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.0.9-h166bdaf_9.conda#1f0a03af852a9659ed2bf08f2f1704fd 
https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d -https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_3.conda#5c159aa79cab06c55aabcecdd9117f31 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda#613955a50485812985c059e7b269f42e +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.2.0-h69a702a_3.conda#73031c79546ad06f1fe62e57fdd021bc +https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-h01aab08_1016.conda#4d0907546d556ef7f14b1dcfa0e217ce +https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda#73301c133ded2bf71906aa2104edae8b +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_0.conda#9b13d5ee90fc9f09d54fd403247342b4 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda#e1c890aebdebbfbf87e2c917187b4416 -https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-3.21.12-h3eb15da_0.conda#4b36c68184c6c85d88c6e595a32a1ede +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.23.3-hd1fb520_1.conda#78c10e8637a6f8d377f9989327d0267d https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h0d5128d_13.conda#e1d6139ff0500977a760567a4bec1ce9 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.42.0-h2797004_0.conda#fdaae20a1cf7cd62130a0973190a31b7 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.44.2-h2797004_0.conda#3b6a9f225c3dbe0d24f4fedd4625c5bf https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c 
-https://conda.anaconda.org/conda-forge/linux-64/libxgboost-1.7.4-cuda111ha8527ea_2.conda#c3d7574f9f6f4066c05eb935f3d96672 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.10.4-hfdac1af_0.conda#241845899caff54ac1d2b3102ad988cf -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.9.2-hc929e4a_1.tar.bz2#5b122b50e738c4be5c3f2899f010d7cf -https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.0-hb012696_0.conda#14d87bdff2cbd3b1179a29fb316ed743 -https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.23-pthreads_h3d04fff_0.conda#9787e8492f0a51a6b0498dc86f3864f9 -https://conda.anaconda.org/conda-forge/linux-64/pandoc-2.19.2-h32600fe_2.conda#326f46f36d15c44cff5f81d505cb717f +https://conda.anaconda.org/conda-forge/linux-64/libxgboost-1.7.6-cuda118_hd3b444d_6.conda#78509401d0879ef5aac72b8d1e104493 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h0d562d8_0.conda#558ab736404275d7df61c473c1af35aa +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b +https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h9458935_0.conda#4c28f3210b30250037a4a627eeee9e0f +https://conda.anaconda.org/conda-forge/linux-64/openlibm-0.8.1-hd590300_1.conda#6eba22eb06d69e53d0ca01eef42bc675 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b -https://conda.anaconda.org/conda-forge/linux-64/perl-5.32.1-2_h7f98852_perl5.tar.bz2#09ba115862623f00962e9809ea248f1a +https://conda.anaconda.org/conda-forge/linux-64/perl-5.32.1-4_hd590300_perl5.conda#3e785bff761095eb7f8676f4694bd1b1 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.44-h06160fa_0.conda#968cb0fca1249fe9778876201dd2b828 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2#5b8c42eb62e9fc961af70bdd6a26e168 
-https://conda.anaconda.org/conda-forge/linux-64/ucx-1.14.1-h78ab4a6_2.conda#afb7b0b482fe84c26a8940876b41c2dc +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.3.46-h06160fa_0.conda#413d96a0b655c8f8aacc36473a2dbb04 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/ucx-1.14.1-h64cca9d_5.conda#39aa3b356d10d7e5add0c540945a0944 https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2#65ad6e1eb4aed2b0611855aff05e04f6 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.4-h9c3ff4c_1.tar.bz2#21743a8d2ea0c8cfbbf8fe489b0347df -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-h166bdaf_4.tar.bz2#4b11e365c0275b808be78b30f904e295 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.2-h3eb15da_6.conda#6b63daed8feeca47be78f323e793d555 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.21-h9fef7b8_5.conda#0e64949e8f740ceeb9f1d6255f314ab2 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.4-h0f2a231_0.conda#876286b5941933a0f558777e57d883cc +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 +https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h59595ed_0.conda#8851084c192dbc56215ac4e3c9aa30fa +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda#68c34ec6149623be41a1933ab996a209 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda#04b88013080254850d6c01ed54810589 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.13.28-h3870b5a_0.conda#b775667301ab249f94ad2bea91fc4223 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 
https://conda.anaconda.org/conda-forge/linux-64/boost-cpp-1.78.0-h6582d0a_3.conda#d3c3c7698d0b878aab1b86db95407c8e -https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_8.tar.bz2#e5613f2bc717e9945840ff474419b8e4 +https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.0.9-h166bdaf_9.conda#d47dee1856d9cb955b8076eeff304a5b https://conda.anaconda.org/conda-forge/linux-64/bwidget-1.9.14-ha770c72_1.tar.bz2#5746d6202ba2abad4a4707f2a2462795 -https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.9.2-hb4ffafa_0.conda#e029f773ae3355c8a05ad7c3db2f8a4b -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda#e1232042de76d24539a436d37597eb06 -https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-13.1.0-hd511a9b_0.conda#e0dead77d88f1bcc59c0e0c2f8975af1 -https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-13.1.0-hc4be1a9_0.conda#e6591b3c81fc5fb83e342b20a2506e80 +https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.11.3-hb4ffafa_0.conda#f394ac64ab0e1fcb0152cc9c16df3d85 +https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hc118613_108.conda#6fa90698000b05dfe8ce6515794fe71a +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb +https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-13.2.0-h76e1118_3.conda#4a04c8f0a51d525776f267112198684d +https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-13.2.0-h338b0a0_3.conda#a5e463121f06f300e5462f98b82d0709 https://conda.anaconda.org/conda-forge/linux-64/hdfeos2-2.20-hebf79cf_1003.conda#23bb57b64a629bc3b33379beece7f0d7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.20.1-h81ceb04_0.conda#89a41adce7106749573d883b2f657d78 -https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.6.2-h3d51595_0.conda#9f915b4adeb9dcfd450b9ad238e2db4c 
-https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-17_linux64_openblas.conda#7ef0969b00fe3d6eef56a8151d3afb29 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.6.2-h039dbb9_1.conda#29cf970521d30d113f3425b84cb250f6 https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.5.1-h1f77430_0.conda#16802fd0c80290248ea79a570bd83b95 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.3-hebfc3b9_0.conda#a64f11b244b2c112cd3fa1cbe9493999 -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-hac7e632_1002.conda#4c4dce87e96b321308f81ba2c10d2897 -https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.54.2-hb20ce57_2.conda#2d6c2c90dd7805816bd78d80977b61d6 -https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.1-hd6dc26d_0.conda#a3ede1b8e47f993ff1fe3908b23bb307 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-17_linux64_openblas.conda#a2103882c46492e26500fcb56c03de8b +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 +https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.56.2-h3905398_1.conda#0b01e6ff8002994bd4ddbffcdbec7856 +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.9.3-default_h554bfaf_1009.conda#f36ddc11ca46958197a45effdd286e45 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.25-pthreads_h413a1c8_0.conda#d172b34a443b95f86089e8229ddc9a17 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.25-pthreads_h384dd9e_0.conda#637ee73ecb26ad42c38278a6619a20ca https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.18.1-h8fd135c_2.conda#bbf65f7688512872f063810623b755dc -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.0-ha587672_6.conda#4e5ee4b062c21519efbee7e2ae608748 -https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.37-h873f0b0_0.tar.bz2#ed0d77d947ddeb974892de8df7224d12 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.89-he45b914_0.conda#2745719a58eeaab6657256a3f142f099 
-https://conda.anaconda.org/conda-forge/linux-64/orc-1.8.3-h2f23424_1.conda#bf63c66993744a1d4b59a6cfdb59524e -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.4-hab00c5b_0_cpython.conda#1c628861a2a126b9fc9363ca1b7d014e -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.42.0-h2c6b66d_0.conda#1192f6ec654a5bc4ee1d64bdc4a3e5cc -https://conda.anaconda.org/conda-forge/linux-64/tktable-2.10-hb7b940f_3.tar.bz2#ea4d0879e40211fa26f38d8986db1bbe -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-hc3e0081_0.tar.bz2#d4c341e0379c31e9e781d4f204726867 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.5-h8ee46fc_0.conda#742d9cd4a7da3ac6345f986e5da3b18d +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda#5b09e13d732dda1a2bc9adc711164f4d +https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.37-h0054252_1.conda#f27960e8873abb5476e96ef33bdbdccd +https://conda.anaconda.org/conda-forge/linux-64/nss-3.95-h1d7d5a4_0.conda#d3a8067adcc45a923f4b1987c91d69da +https://conda.anaconda.org/conda-forge/linux-64/orc-1.9.0-h385abfd_1.conda#2cd5aac7ef1b4c6ac51bf521251a89b3 +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.1.3-h32600fe_0.conda#8287aeb8462e2d4b235eff788e75919d +https://conda.anaconda.org/conda-forge/linux-64/python-3.11.6-hab00c5b_0_cpython.conda#b0dfbe2fcbfdb097d321bfd50ecddab1 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.44.2-h2c6b66d_0.conda#4f2892c672829693fd978d065db4e8be +https://conda.anaconda.org/conda-forge/linux-64/tktable-2.10-h0c5db8f_5.conda#9464044754ea25557a9c93f0327d90a6 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.7-h8ee46fc_0.conda#49e482d882669206653b095f5206c05b https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda#ae5f4ad87126c55ba3f690ef07f81d64 
https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.13-pyhd8ed1ab_0.conda#06006184e203b61d3525f90de394471e -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py311h38be061_1003.tar.bz2#0ab8f8f0cae99343907fe68cda11baea -https://conda.anaconda.org/conda-forge/linux-64/arpack-3.7.0-hdefa2d7_2.tar.bz2#8763fe86163198ef1778d1d8d22bb078 +https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2#c0481c9de49f040272556e2cedf42816 https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b https://conda.anaconda.org/conda-forge/noarch/attrs-23.1.0-pyh71513ae_1.conda#3edfead7cedd1ab4400a6c588f3e75f8 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.2.20-hb4b372c_7.conda#7eb8e72640ac21ce4e7d26e873a21cbe -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.7-h2632f9a_4.conda#f4cd59b8e2ac740faded0f75aa965a71 -https://conda.anaconda.org/conda-forge/linux-64/backports.zoneinfo-0.2.1-py311h38be061_7.tar.bz2#ec62b3c5b953cb610f5e2b09cd776caf -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_8.tar.bz2#2ff08978892a3e8b954397c461f18418 -https://conda.anaconda.org/conda-forge/noarch/certifi-2023.5.7-pyhd8ed1ab_0.conda#5d1b71c942b8421285934dad1d891ebc +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.3.1-h9599702_1.conda#a8820ce2dbe6f7d54f6540d9a3a0028a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.7.11-hbe98c3e_0.conda#067641478d8f706b80a5a434a22b82be +https://conda.anaconda.org/conda-forge/linux-64/backports.zoneinfo-0.2.1-py311h38be061_8.conda#5384590f14dfe6ccd02811236afc9f8e +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.0.9-h166bdaf_9.conda#4601544b4982ba1861fa9b9c607b2c06 
+https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.0.9-py311ha362b79_9.conda#ced5340f5dc6cff43a80deac8d0e398f +https://conda.anaconda.org/conda-forge/noarch/certifi-2023.11.17-pyhd8ed1ab_0.conda#2011bcf45376341dd1d690263fdbc789 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.1.0-pyhd8ed1ab_0.conda#7fcff9f6f123696e940bda77bd4d6551 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-unix_pyhd8ed1ab_2.tar.bz2#20e4087407c7cb04a40817114b333dbf -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda#b325bfc4cff7d7f8a868f1f7ecc4ed16 -https://conda.anaconda.org/conda-forge/noarch/codespell-2.2.4-pyhd8ed1ab_0.conda#27996543252c93207e54bb35daf80998 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f +https://conda.anaconda.org/conda-forge/noarch/codespell-2.2.6-pyhd8ed1ab_0.conda#a206349b7bb7475ae580f987cb425bdd https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 https://conda.anaconda.org/conda-forge/noarch/config-0.5.1-pyhd8ed1ab_0.tar.bz2#97275d4898af65967b1ad57923cef770 -https://conda.anaconda.org/conda-forge/noarch/configargparse-1.5.3-pyhd8ed1ab_0.tar.bz2#318b72c3c2dfca9aebdbaf258609d02d -https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2#a50559fad0affdbb33729a68669ca1cb -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.35-py311hb755f60_0.conda#17f4738a1ca6155a63d2a0cbd3e4a8b1 
+https://conda.anaconda.org/conda-forge/noarch/configargparse-1.7-pyhd8ed1ab_0.conda#0d07dc29b1c1cc973f76b74beb44915f +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.6-py311hb755f60_0.conda#88cc84238dda72e11285d9cfcbe43e51 https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2#961b3a227b437d82ad7054484cfa71b2 -https://conda.anaconda.org/conda-forge/noarch/dill-0.3.6-pyhd8ed1ab_1.tar.bz2#88c82ca702197fff8a5e87619707556b -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.6-pyhd8ed1ab_0.tar.bz2#b65b4d50dbd2d50fa0aeac367ec9eed7 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.20.1-py311h38be061_0.conda#207175b7d514d42f977ec505800d6824 +https://conda.anaconda.org/conda-forge/noarch/dill-0.3.7-pyhd8ed1ab_0.conda#5e4f3466526c52bc9af2d2353a1460bd +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.7-pyhd8ed1ab_0.conda#12d8aae6994f342618443a8f05c652a0 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.20.1-py311h38be061_3.conda#1c33f55e5cdcc2a2b973c432b5225bfe https://conda.anaconda.org/conda-forge/noarch/dodgy-0.2.1-py_0.tar.bz2#62a69d073f7446c90f417b0787122f5b https://conda.anaconda.org/conda-forge/noarch/ecmwf-api-client-1.6.3-pyhd8ed1ab_0.tar.bz2#15621abf59053e184114d3e1d4f9d01e https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2#3cf04868fee0a029769bd41f4b2fbf2d https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda#a2f2138597905eaa72e561d8efb42cf3 -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.1.1-pyhd8ed1ab_0.conda#7312299d7a0ea4993159229b7d2dceb2 -https://conda.anaconda.org/conda-forge/noarch/execnet-1.9.0-pyhd8ed1ab_0.tar.bz2#0e521f7a5e60d508b121d38b04874fb2 +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.0-pyhd8ed1ab_0.conda#f6c211fee3c98229652b60a9a42ef363 
+https://conda.anaconda.org/conda-forge/noarch/execnet-2.0.2-pyhd8ed1ab_0.conda#67de0d8241e1060a479e3c37793e26f9 https://conda.anaconda.org/conda-forge/noarch/fasteners-0.17.3-pyhd8ed1ab_0.tar.bz2#348e27e78a5e39090031448c72f66d5e -https://conda.anaconda.org/conda-forge/noarch/filelock-3.12.1-pyhd8ed1ab_0.conda#1f262528bc0ca9d410b98c02d09de3ac +https://conda.anaconda.org/conda-forge/noarch/filelock-3.13.1-pyhd8ed1ab_0.conda#0c1729b74a8152fde6a38ba0a2ab9f45 https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.3.3-py311hd4cff14_0.tar.bz2#b81ebef162551d6cf909263695fd6d6b -https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.6.0-pyh1a96a4e_0.conda#50ea2067ec92dfcc38b4f07992d7e235 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.0-py311h459d7ec_1.conda#23d0b2d02252b32ee14e5063ccfb41e2 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.12.1-pyhca7485f_0.conda#b38946846cdf39f9bce93f75f571d913 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6b639ba_2.conda#ee8220db21db8094998005990418fe5b https://conda.anaconda.org/conda-forge/noarch/geographiclib-1.52-pyhd8ed1ab_0.tar.bz2#6880e7100ebae550a33ce26663316d85 -https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/humanfriendly-10.0-py311h38be061_4.tar.bz2#5c4f38a9e482f00a7bf23fe479c8ca29 -https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2#34272b248891bddccc64479f9a7fffed +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe 
+https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd8ed1ab_6.conda#2ed1fe4b9079da97c44cfe9c2e5078fd +https://conda.anaconda.org/conda-forge/noarch/idna-3.6-pyhd8ed1ab_0.conda#1a76f09108576397c41c0b0c5bd84134 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.1.2-pyhd8ed1ab_0.tar.bz2#3c3de74912f11d2b590184f03c7cd09b -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.4-py311h4dd048b_1.tar.bz2#46d451f575392c01dc193069bd89766d -https://conda.anaconda.org/conda-forge/linux-64/lazy-object-proxy-1.9.0-py311h2582759_0.conda#07745544b144855ed4514a4cf0aadd74 -https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.2-pyhd8ed1ab_0.conda#d060d017720c9882c4eca0544a4a0592 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.5-py311h9547e67_1.conda#2c65bdf442b0d37aad080c8a4e0d452f +https://conda.anaconda.org/conda-forge/linux-64/lazy-object-proxy-1.9.0-py311h459d7ec_1.conda#7cc99d87755a9e64586a6004c5f0f534 +https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.3-pyhd8ed1ab_0.conda#69ea1d0fa7ab33b48c88394ad1dead65 https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda#980d8aca0bc23ca73fa8caa3e7c84c28 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-20_linux64_openblas.conda#2b7bb4f7562c8cf334fc2e20c2d28abc https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.1.2-h409715c_0.conda#50c873c9660ed116707ae15b663928d8 -https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-h37653c0_1015.tar.bz2#37d3747dd24d604f63d2610910576e63 -https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_0.conda#e945f0fd2471f9b51b32819c1ea83577 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.0-hb47c5f0_0.conda#9cfd7ad6e1539ca1ad172083586b3301 
-https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.40.0-py311ha6695c7_0.conda#4524604af5d8545cdef802c3a4c3951d +https://conda.anaconda.org/conda-forge/linux-64/libpq-15.3-hbcd7760_1.conda#8afb2a97d256ffde95b91a6283bc598c +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.1-hbf2b3c1_0.conda#4963f3f12db45a576f2b8fbe9a0b8569 +https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.41.1-py311ha6695c7_0.conda#60fa8c1f3fb0d99dd10a9af2aff9c400 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/lxml-4.9.2-py311h14a6109_0.conda#cad902ff23dfa44e54e6daa046593a17 -https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.2-py311h9f220a4_0.conda#b8aad2507303e04037e8d02d8ac54217 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_0.conda#9904dc4adb5d547cb21e136f98cb24b0 +https://conda.anaconda.org/conda-forge/linux-64/lxml-4.9.3-py311h1a07684_1.conda#aab51e50d994e58efdfa5382139b0468 +https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.2-py311h38e4bf4_1.conda#f8e0b648d77bbe44d1fe8af8cc56a590 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.3-py311h459d7ec_1.conda#71120b5155a0c500826cf81536721a15 https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_0.tar.bz2#34fc335fc50eef0b5ea708f2b5f54e0c -https://conda.anaconda.org/conda-forge/noarch/mistune-2.0.5-pyhd8ed1ab_0.conda#61a07195cfc935f1c1901d8ecf4af441 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.5-py311ha3edf6b_0.conda#7415f24f8c44e44152623d93c5015000 -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.0.4-py311h2582759_0.conda#8f581c14b50f2df47a2c6bd8d230a579 +https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda#5cbee699846772cc939bef23a0d524ed +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.7-py311h9547e67_0.conda#3ac85c6c226e2a2e4b17864fc2ca88ff 
+https://conda.anaconda.org/conda-forge/linux-64/multidict-6.0.4-py311h459d7ec_1.conda#3dc76316237c8f7e7231d61b76c62b7c +https://conda.anaconda.org/conda-forge/noarch/munch-4.0.0-pyhd8ed1ab_0.conda#376b32e8f9d3eacbd625f37d39bd507d https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda#4eccaeba205f0aed9ac3a9ea58568ca3 -https://conda.anaconda.org/conda-forge/noarch/natsort-8.3.1-pyhd8ed1ab_0.conda#8c53149aa69bfeb045f28c92a2b2ddc7 -https://conda.anaconda.org/conda-forge/noarch/networkx-3.1-pyhd8ed1ab_0.conda#254f787d5068bc89f578bf63893ce8b4 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.24.2-py311h8e6699e_0.conda#90db8cc0dfa20853329bfc6642f887aa +https://conda.anaconda.org/conda-forge/noarch/natsort-8.4.0-pyhd8ed1ab_0.conda#70959cd1db3cf77b2a27a0836cfd08a7 +https://conda.anaconda.org/conda-forge/noarch/networkx-3.2.1-pyhd8ed1ab_0.conda#425fce3b531bed6ec3c74fab3e5f0a1c +https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.25-pthreads_h3d04fff_0.conda#29bd5f6106f71fbaa5c557b4c22c9e0c https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda#5ce6a42505c6e9e6151c54c3ec8d68ea -https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda#91cda59e66e1e4afe9476f8ef98f5c30 +https://conda.anaconda.org/conda-forge/noarch/packaging-23.2-pyhd8ed1ab_0.conda#79002079284aa895f883c6b7f3f88fd6 https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 -https://conda.anaconda.org/conda-forge/noarch/pathspec-0.11.1-pyhd8ed1ab_0.conda#dbb80d1e8dc2dba5c8b106dc0768ad45 -https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_0.tar.bz2#89e3c7cdde7d3aaa2aee933b604dd07f -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.0.0-pyhd8ed1ab_5.tar.bz2#7d301a0d25f424d96175f810935f0da9 
-https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h2582759_0.conda#a90f8e278c1cd7064b2713e6b7db87e6 -https://conda.anaconda.org/conda-forge/noarch/py-1.11.0-pyh6c4a22f_0.tar.bz2#b4613d7e7a493916d867842a6a148054 +https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda#17064acba08d3686f1135b5ec1b32b12 +https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda#405678b942f2481cecdb3e010f4925d9 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.1.0-pyhd8ed1ab_0.conda#45a5065664da0d1dfa8f8cd2eaf05ab9 +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.3.0-pyhd8ed1ab_0.conda#2390bd10bed1f3fdc7a537fb5a447d8d +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.9.5-py311h459d7ec_1.conda#490d7fa8675afd1aa6f1b2332d156a45 https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.9.1-pyhd8ed1ab_0.tar.bz2#0191dd7efe1a94262812770183b68892 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2#076becd9e05608f8dc72757d5f3a91ff https://conda.anaconda.org/conda-forge/noarch/pyflakes-2.5.0-pyhd8ed1ab_0.tar.bz2#1b3bef4313288ae8d35b1dfba4cd84a3 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.15.1-pyhd8ed1ab_0.conda#d316679235612869eba305aa7d41d9bf -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2#e8fbc1b54b25f4b08281467bc13b70cc -https://conda.anaconda.org/conda-forge/linux-64/pyrsistent-0.19.3-py311h2582759_0.conda#e53876b66dcc4ba8a0afa63cd8502ac3 +https://conda.anaconda.org/conda-forge/noarch/pygments-2.17.2-pyhd8ed1ab_0.conda#140a7f159396547e9799aa98f9f0742e +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.1-pyhd8ed1ab_0.conda#176f7d56f0cfe9008bdf1bccd7de02fb https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 
-https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.17.1-pyhd8ed1ab_0.conda#dd4f393d857e9283eef2442234bd05e3 +https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.19.0-pyhd8ed1ab_0.conda#e4dbdb3585c0266b4710467fe7b75cf4 https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2023.3-pyhd8ed1ab_0.conda#2590495f608a63625e165915fb4e2e34 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.2.0-py311h2582759_0.conda#dfcc3e6e30d6ec2b2bb416fcd8ff4dc1 -https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3-pyhd8ed1ab_0.conda#d3076b483092a435832603243567bc31 -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0-py311hd4cff14_5.tar.bz2#da8769492e423103c59f469f4f17f8d9 -https://conda.anaconda.org/conda-forge/linux-64/pyzmq-25.1.0-py311h75c88c4_0.conda#db94a7a9e865fbfde8c023b6e8958bb2 -https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.7-py311h2582759_1.conda#5e997292429a22ad50c11af0a2cb0f08 -https://conda.anaconda.org/conda-forge/noarch/semver-3.0.0-pyhd8ed1ab_0.conda#4ed7f334acb2c73ff514e182f3d609fc +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.4.1-py311h459d7ec_0.conda#60b5332b3989fda37884b92c7afd6a91 +https://conda.anaconda.org/conda-forge/noarch/pytz-2023.3.post1-pyhd8ed1ab_0.conda#c93346b446cd08c169d843ae5fc0da97 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py311h459d7ec_1.conda#52719a74ad130de8fb5d047dc91f247a +https://conda.anaconda.org/conda-forge/linux-64/pyzmq-25.1.2-py311h34ded2d_0.conda#819aa640a0493d4b52faf938e94d129e +https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.13.2-py311h46250e7_0.conda#c5f5089dd1fe0000fecaf0d12eca50b9 +https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.7-py311h459d7ec_2.conda#56bc3fe5180c0b23e05c7a5708153ac7 +https://conda.anaconda.org/conda-forge/noarch/semver-3.0.2-pyhd8ed1ab_0.conda#5efb3fccda53974aed800b6d575f72ed 
https://conda.anaconda.org/conda-forge/noarch/setoptconf-tmp-0.3.1-pyhd8ed1ab_0.tar.bz2#af3e36d4effb85b9b9f93cd1db0963df -https://conda.anaconda.org/conda-forge/noarch/setuptools-67.7.2-pyhd8ed1ab_0.conda#3b68bc43ec6baa48f7354a446267eefe -https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.1-py311h2582759_0.conda#c58e325a8500b8755e95cf0622665840 +https://conda.anaconda.org/conda-forge/noarch/setuptools-68.2.2-pyhd8ed1ab_0.conda#fc2166155db840c634a1291a5c35a709 +https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.2-py311h459d7ec_0.conda#d6478cbce002db6303f0d749860f3e22 https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 -https://conda.anaconda.org/conda-forge/noarch/smmap-3.0.5-pyh44b312d_0.tar.bz2#3a8dc70789709aa315325d5df06fb7e4 +https://conda.anaconda.org/conda-forge/noarch/smmap-5.0.0-pyhd8ed1ab_0.tar.bz2#62f26a3d1387acee31322208f0cfa3e0 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2#146f4541d643d48fc8a75cacf69f03ae -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.4-pyhd8ed1ab_0.conda#5a31a7d564f551d0e6dff52fd8cb5b16 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.1-pyhd8ed1ab_0.conda#6c8c4d6eb2325e59290ac6dbbeacd5f0 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a 
-https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_2.tar.bz2#9ff55a0901cf952f05c654394de76bf7 +https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 https://conda.anaconda.org/conda-forge/noarch/sqlparse-0.4.4-pyhd8ed1ab_0.conda#2e2f31b3b1c866c29636377e14f8c4c6 -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.9.0-hf52228f_0.conda#f495e42d3d2020b025705625edf35490 -https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 -https://conda.anaconda.org/conda-forge/noarch/tenacity-8.2.2-pyhd8ed1ab_0.conda#7b39e842b52966a99e229739cd4dc36e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.11.0-h00ab1b0_0.conda#fde515afbbe6e36eb4564965c20b1058 +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f +https://conda.anaconda.org/conda-forge/noarch/tenacity-8.2.3-pyhd8ed1ab_0.conda#1482e77f87c6a702a7e05ef22c9b197b https://conda.anaconda.org/conda-forge/noarch/termcolor-2.3.0-pyhd8ed1ab_0.conda#440d508f025b1692168caaf436504af3 -https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.1.0-pyh8a188c0_0.tar.bz2#a2995ee828f65687ac5b1e71a2ab1e0c +https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.2.0-pyha21a80b_0.conda#978d03388b62173b8e6f79162cf52b86 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 -https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.11.8-pyha770c72_0.conda#75838e8556166263a82038b51d01d5f1 +https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.12.3-pyha770c72_0.conda#074d0ce7a6261ab8b497c3518796ef3e 
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2#92facfec94bc02d6ccf42e7173831a36 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.2-py311h459d7ec_0.conda#12b1c374ee90a1aa11ea921858394dc8 -https://conda.anaconda.org/conda-forge/noarch/traitlets-5.9.0-pyhd8ed1ab_0.conda#d0b4f5c87cd35ac3fb3d47b223263a64 -https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2023.5.24-pyhd8ed1ab_0.conda#4580a4f27cad1c3b275f6f6ad310abae -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.6.3-pyha770c72_0.conda#4a3014a4d107d15475d106b751c4e352 -https://conda.anaconda.org/conda-forge/linux-64/ujson-5.7.0-py311hcafe171_0.conda#ec3960b6d13bb60aad9c67f42a801720 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.3.3-py311h459d7ec_1.conda#a700fcb5cedd3e72d0c75d095c7a6eda +https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.0-pyhd8ed1ab_0.conda#886f4a84ddb49b943b1697ac314e85b3 +https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2023.11.29-pyhd8ed1ab_0.conda#b355907cec4e6a516f2909396add77c8 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.9.0-pyha770c72_0.conda#a92a6440c3fe7052d63244f3aba2a4a7 +https://conda.anaconda.org/conda-forge/linux-64/ujson-5.9.0-py311hb755f60_0.conda#36dda52dc99a4fb9cadd3b738ec24848 https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-py_0.tar.bz2#1447ead40f2a01733a9c8dfc32988375 -https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-py_1.tar.bz2#3563be4c5611a44210d9ba0c16113136 +https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 https://conda.anaconda.org/conda-forge/noarch/webob-1.8.7-pyhd8ed1ab_0.tar.bz2#a8192f3585f341ea66c60c189580ac67 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.40.0-pyhd8ed1ab_0.conda#49bb0d9e60ce1db25e151780331bb5f3 -https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.15.0-py311h2582759_0.conda#15565d8602a78c6a994e4d9fcb391920 
-https://conda.anaconda.org/conda-forge/noarch/xlsxwriter-3.1.2-pyhd8ed1ab_0.conda#e0593431fd5e9c12824b9bfa989c9ed0 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.42.0-pyhd8ed1ab_0.conda#1cdea58981c5cbc17b51973bcaddcea7 +https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py311h459d7ec_0.conda#6669b5529d206c1f880b642cdd17ae05 +https://conda.anaconda.org/conda-forge/noarch/xlsxwriter-3.1.9-pyhd8ed1ab_0.conda#70e533db62a710ae216fdaccc4a983c8 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2#e9a21aa4d5e3e5f1aed71e8cefd46b6a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hd590300_0.conda#ab2044e8d87dda9f74652e8e084a5569 -https://conda.anaconda.org/conda-forge/noarch/xyzservices-2023.5.0-pyhd8ed1ab_1.conda#232ea5ed580a598cdf887a890c29b629 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hd590300_1.conda#ae92aab42726eb29d16488924f7312cb +https://conda.anaconda.org/conda-forge/noarch/xyzservices-2023.10.1-pyhd8ed1ab_0.conda#1e0d85c0e2fef9539218da185b285f54 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.15.0-pyhd8ed1ab_0.conda#13018819ca8f5b7cc675a8faf1f5fedf +https://conda.anaconda.org/conda-forge/noarch/zipp-3.17.0-pyhd8ed1ab_0.conda#2e4d6bc0b14e10f895fc6791a7d9b26a https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.4-pyhd8ed1ab_0.conda#46a2e6e3dfa718ce3492018d5a110dd6 https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 
https://conda.anaconda.org/conda-forge/noarch/asgiref-3.7.2-pyhd8ed1ab_0.conda#596932155bf88bb6837141550cb721b0 -https://conda.anaconda.org/conda-forge/linux-64/astroid-2.15.5-py311h38be061_0.conda#bc99014b1cb98221bc4a0f4dc889d26f -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.6.27-he072965_1.conda#2c7406414796748e53bd7d7c6349711d -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.11-h2282364_1.conda#11a4a996699d883ebda0894faa71bbfc -https://conda.anaconda.org/conda-forge/noarch/babel-2.12.1-pyhd8ed1ab_1.conda#ac432e732804a81ddcf29c92ead57cde +https://conda.anaconda.org/conda-forge/linux-64/astroid-2.15.8-py311h38be061_0.conda#46d70fcb74472aab178991f0231ee3c6 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.0-hf8751d9_2.conda#deb12196f0c64c441bb3d083d06d0cf8 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.8.14-h2e270ba_2.conda#58bbee5fd6cf2d4fffbead1bc33a5d3b +https://conda.anaconda.org/conda-forge/noarch/babel-2.13.1-pyhd8ed1ab_0.conda#3ccff479c246692468f604df9c85ef26 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.2-pyha770c72_0.conda#a362ff7d976217f8fa78c0f1c4f59717 -https://conda.anaconda.org/conda-forge/noarch/bleach-6.0.0-pyhd8ed1ab_0.conda#d48b143d01385872a88ef8417e96c30e +https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda#0ed9d7c0e9afa7c025807a9a8136ea3e https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-h9c3ff4c_0.tar.bz2#c1ac6229d0bfd14f8354ff9ad2a26cad https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-hbbf8b49_1016.conda#c1dd96500b9b1a75e9e511931f415cbc -https://conda.anaconda.org/conda-forge/noarch/cattrs-23.1.2-pyhd8ed1ab_0.conda#e554f60477143949704bf470f66a81e7 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.15.1-py311h409f033_3.conda#9025d0786dbbe4bc91fd8e85502decce +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.16.0-py311hb3a22ac_0.conda#b3469563ac5e808b0cd92810d0697043 
https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.2.0-hd9d235c_0.conda#8c57a9adbafd87f5eff842abde599cb4 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.2-py311h4c7f6c3_1.tar.bz2#c7e54004ffd03f8db0a58ab949f2a00b https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2#4fd2c6b53934bd7d96d1f3fdaf99b79f https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2#a29b7c141d6b2de4bb67788a5f107734 -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.0.7-py311ha3edf6b_0.conda#e7548e7f58965a2fe97a95950a5fedc6 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.2.7-py311h459d7ec_0.conda#3c2c65575c28b23afc5e4ff721a2fc9f +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.3.2-py311h459d7ec_0.conda#7b3145fed7adc7c63a0e08f6f29f5480 https://conda.anaconda.org/conda-forge/linux-64/curl-8.1.2-h409715c_0.conda#9f88cfb15b7d08b25880b138f91e0eb4 -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.0-py311hd4cff14_1.tar.bz2#21523141b35484b1edafba962c6ea883 -https://conda.anaconda.org/conda-forge/noarch/docformatter-1.7.2-pyhd8ed1ab_0.conda#e8cba6eadd087d154ddcf494718f9f99 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.2-py311h459d7ec_1.conda#afe341dbe834ae76d2c23157ff00e633 +https://conda.anaconda.org/conda-forge/noarch/docformatter-1.7.5-pyhd8ed1ab_0.conda#3a941b6083e945aa87e739a9b85c82e9 https://conda.anaconda.org/conda-forge/noarch/docrep-0.3.2-pyh44b312d_0.tar.bz2#235523955bc1bfb019d7ec8a2bb58f9a -https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.0-py_0.tar.bz2#6e166cd37cfeadefcfca1ffe00f222bb https://conda.anaconda.org/conda-forge/noarch/fire-0.5.0-pyhd8ed1ab_0.conda#9fd22aae8d2f319e80f68b295ab91d64 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.39.4-py311h459d7ec_0.conda#ddd2cd004e10bc7a1e042283326cbf91 
-https://conda.anaconda.org/conda-forge/noarch/geopy-2.3.0-pyhd8ed1ab_0.tar.bz2#529faeecd6eee3a3b782566ddf05ce92 -https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.10-pyhd8ed1ab_0.conda#3706d2f3d7cb5dae600c833345a76132 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.46.0-py311h459d7ec_0.conda#a14114f70e23f7fd5ab9941fec45b095 +https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_0.conda#c75621ce68f6570fff9a6734cf21c9a7 +https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda#623b19f616f2ca0c261441067e18ae40 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.0-nompi_hb72d44e_103.conda#975973a4350ab45ff1981fe535a12af5 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-6.6.0-pyha770c72_0.conda#f91a5d5175fb7ff2a91952ec7da59cb9 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-5.12.0-pyhd8ed1ab_0.conda#e5fd2260a231ee63b6969f4801082f2b +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.0.0-pyha770c72_0.conda#a941237cd06538837b25cd245fcd25d8 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.1.1-pyhd8ed1ab_0.conda#3d5fa25cf42f3f32a12b2d874ace8574 https://conda.anaconda.org/conda-forge/noarch/isodate-0.6.1-pyhd8ed1ab_0.tar.bz2#4a62c93c1b5c0b920508ae3fd285eaf5 https://conda.anaconda.org/conda-forge/noarch/isort-5.12.0-pyhd8ed1ab_1.conda#07ed3421bad60867234c7a9282ea39d4 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2#c8490ed5c70966d232fdd389d0dbed37 -https://conda.anaconda.org/conda-forge/noarch/joblib-1.2.0-pyhd8ed1ab_0.tar.bz2#7583652522d71ad78ba536bba06940eb -https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.2.2-pyhd8ed1ab_0.tar.bz2#243f63592c8e449f40cd42eb5cf32f40 +https://conda.anaconda.org/conda-forge/noarch/joblib-1.3.2-pyhd8ed1ab_0.conda#4da50d410f553db77e62ab62ffaa1abc +https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.5.0-py311h38be061_0.conda#cee83be29258275f75029125e186ab6d 
+https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_0.conda#3f0915b1fb2252ab73686a533c5f9d3f https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-20_linux64_openblas.conda#36d486d72ab64ffea932329a1d3729a3 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hfa28ad5_6.conda#ef06bee47510a7f5db3c2297a51d6ce2 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.11.0-hac9eb74_1.conda#f463669862853ddbb192c810aac4390e +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-hac7e632_1003.conda#50c389a09b6b7babaef531eb7cb5e0ca +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.12.0-h840a212_1.conda#03c225a73835f5aa68c13e62eb360406 +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-20_linux64_openblas.conda#6fabc51f5e647d09cc010c40061557e0 https://conda.anaconda.org/conda-forge/noarch/logilab-common-1.7.3-py_0.tar.bz2#6eafcdf39a7eb90b6d951cfff59e8d3b -https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 -https://conda.anaconda.org/conda-forge/noarch/munch-3.0.0-pyhd8ed1ab_0.conda#3d5fa8396d78c916d51fb1c6cda24945 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.8.0-pyhd8ed1ab_0.conda#2a75b296096adabbabadd5e9782e5fcc -https://conda.anaconda.org/conda-forge/linux-64/numba-0.57.0-py311h96b013e_1.conda#85bb20481daadd8153e9d6366f1c0edc -https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.11.0-py311hcafe171_1.conda#ecdaf0772e524ed51218f6d52ef74424 -https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.2-py311h459d7ec_0.conda#3fd48307e8596409a2a55d516fa3ad1f 
-https://conda.anaconda.org/conda-forge/noarch/partd-1.4.0-pyhd8ed1ab_0.conda#721dab5803ea92ce02ddc4ee50aa0c48 -https://conda.anaconda.org/conda-forge/linux-64/pillow-9.5.0-py311h0b84326_1.conda#6be2190fdbf26a6c1d3356a54d955237 -https://conda.anaconda.org/conda-forge/noarch/pip-23.1.2-pyhd8ed1ab_0.conda#7288da0d36821349cf1126e8670292df -https://conda.anaconda.org/conda-forge/noarch/plotly-5.15.0-pyhd8ed1ab_0.conda#48573e7cca7860509648522a3b8507d7 -https://conda.anaconda.org/conda-forge/linux-64/postgresql-15.3-h814edd5_0.conda#c72622dbd4193522a0b568886b63048d -https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.0-h8ffa02c_0.conda#8b9dcfabec5c6bcac98e89889fffa64e +https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.2-py311h459d7ec_1.conda#5c809fb753f06a04c2f114394404769e +https://conda.anaconda.org/conda-forge/noarch/partd-1.4.1-pyhd8ed1ab_0.conda#acf4b7c0bcd5fa3b0e05801c4d2accd6 +https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.0-py311h0b84326_0.conda#4b24acdc1fbbae9da03147e7d2cf8c8a +https://conda.anaconda.org/conda-forge/noarch/pip-23.3.1-pyhd8ed1ab_0.conda#2400c0b86889f43aa52067161e1fb108 +https://conda.anaconda.org/conda-forge/noarch/plotly-5.18.0-pyhd8ed1ab_0.conda#9f6a8664f1fe752f79473eeb9bf33a60 +https://conda.anaconda.org/conda-forge/linux-64/postgresql-15.3-hd458b1d_1.conda#4a4b5dede4d2e075e9aa5a44a9fd9f20 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda#e992387307f4403ba0ec07d009032550 https://conda.anaconda.org/conda-forge/noarch/pydocstyle-6.3.0-pyhd8ed1ab_0.conda#7e23a61a7fbaedfef6eb0e1ac775c8e5 https://conda.anaconda.org/conda-forge/noarch/pyproject_hooks-1.0.0-pyhd8ed1ab_0.conda#21de50391d584eb7f4441b9de1ad773f +https://conda.anaconda.org/conda-forge/noarch/pytest-7.4.3-pyhd8ed1ab_0.conda#5bdca0aca30b0ee62bb84854e027eae0 https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2#dd999d1cc9f79e67dbb855c8924c7984 
-https://conda.anaconda.org/conda-forge/noarch/python-utils-3.6.0-pyhd8ed1ab_0.conda#64f841b5319404bbc5962fa8bd7b860a -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311hcb2cf0a_0.conda#272ca0c28df344037ba2c4982d4e4791 +https://conda.anaconda.org/conda-forge/noarch/python-utils-3.8.1-pyhd8ed1ab_0.conda#4dc77041fea14c63ecea50ce0eb92873 +https://conda.anaconda.org/conda-forge/noarch/referencing-0.32.0-pyhd8ed1ab_0.conda#a7b5a535cd614e384594530aee7e6061 https://conda.anaconda.org/conda-forge/noarch/retrying-1.3.3-py_2.tar.bz2#a11f356d6f93b74b4a84e9501afd48b4 -https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.17.31-py311h459d7ec_0.conda#f0fc1409f49257fe5ec2d86d0595d9bc -https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.4-py_1.tar.bz2#a9e101e1601faf5e5a119ab2bd7617a4 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e -https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-py_0.tar.bz2#cb83a3d6ecf73f50117635192414426a -https://conda.anaconda.org/conda-forge/linux-64/suitesparse-5.10.1-h9e50725_1.tar.bz2#a3a685b5f9aeb890ed874502fe56accf +https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.5-py311h459d7ec_0.conda#1101ec27377f8e45d8431a5f21d744f1 https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.2.1-pyhd8ed1ab_0.tar.bz2#7234c9eefff659501cd2fe0d2ede4d48 -https://conda.anaconda.org/conda-forge/noarch/tqdm-4.65.0-pyhd8ed1ab_1.conda#ed792aff3acb977d09c7013358097f83 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.6.3-hd8ed1ab_0.conda#3876f650ed7d0f95d70fa4b647621909 +https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.1-pyhd8ed1ab_0.conda#03c97908b976498dcae97eb4e4f3149c +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.9.0-hd8ed1ab_0.conda#c16524c1b7227dc80b36b4fa6f77cc86 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 
-https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.3-pyhd8ed1ab_0.conda#ae465d0fbf9f1979cb2d8d4043d885e2 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.1.0-pyhd8ed1ab_0.conda#f8ced8ee63830dec7ecc1be048d1470a +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.25.0-pyhd8ed1ab_0.conda#c119653cba436d8183c27bf6d190e587 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.4-h8d71039_2.conda#6d5edbe22b07abae2ea0a9065ef6be12 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h7f98852_0.tar.bz2#e77615e5141cad5a2acaa043d1cf0ca5 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.1.3-h7f98852_0.tar.bz2#3cdb89236358326adfce12be820a8af3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxpm-3.5.16-hd590300_0.conda#7a2672267d49208afe2df6cbef8a6a79 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxpm-3.5.17-hd590300_0.conda#12bf78e12f71405775e1c092902959d3 https://conda.anaconda.org/conda-forge/noarch/yamale-4.0.4-pyh6c4a22f_0.tar.bz2#cc9f59f147740d88679bf1bd94dbe588 -https://conda.anaconda.org/conda-forge/noarch/yamllint-1.32.0-pyhd8ed1ab_0.conda#6d2425548b0293a225ca4febd80feaa3 -https://conda.anaconda.org/conda-forge/noarch/yapf-0.33.0-pyhd8ed1ab_1.conda#ea4867f364b3f7f48c67643028c7f4c6 -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.9.2-py311h459d7ec_0.conda#4d738187d20e0a3be66973860f134e0a -https://conda.anaconda.org/conda-forge/noarch/async-timeout-4.0.2-pyhd8ed1ab_0.tar.bz2#25e79f9a1133556671becbd65a170c78 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.0-hcb5a9b2_2.conda#e32991aa713aafc13ae31869d44e04ad -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_0.conda#43a71a823583d75308eaf3a06c8f150b -https://conda.anaconda.org/conda-forge/linux-64/cryptography-41.0.1-py311h63ff55d_0.conda#69ad01f66b8efff535d341ba5b283c2c -https://conda.anaconda.org/conda-forge/noarch/django-4.2.2-pyhd8ed1ab_0.conda#31af05cc9ec79e8eaa8c452a00fb33f7 
+https://conda.anaconda.org/conda-forge/noarch/yamllint-1.33.0-pyhd8ed1ab_0.conda#57d32eb2c4b76ef288f9dd789f8fe5af +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.9.3-py311h459d7ec_0.conda#96f995652440b0a9266d66a691d9eff9 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.9.1-py311h459d7ec_0.conda#a51ceb9a9219e3c11af56b2b77794839 +https://conda.anaconda.org/conda-forge/linux-64/arpack-3.7.0-hdefa2d7_2.tar.bz2#8763fe86163198ef1778d1d8d22bb078 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.3.13-heb0bb06_2.conda#c0866da05d5e7bb3a3f6b68bcbf7537b +https://conda.anaconda.org/conda-forge/noarch/cattrs-23.2.3-pyhd8ed1ab_0.conda#91fc4700dcce4a46d439900a132fe4e5 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-41.0.7-py311hcb13ee4_1.conda#ca6e04ac7262ecaec846e483d6fdc6c8 +https://conda.anaconda.org/conda-forge/noarch/django-5.0-pyhd8ed1ab_0.conda#9af5d8131de6eb7b0f2f167fba3bdff7 https://conda.anaconda.org/conda-forge/noarch/flake8-5.0.4-pyhd8ed1ab_0.tar.bz2#8079ea7dec0a917dd0cb6c257f7ea9ea https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-hac7e632_2.conda#6e553df297f6e64668efb54302e0f139 https://conda.anaconda.org/conda-forge/noarch/funcargparse-0.2.5-pyhd8ed1ab_0.tar.bz2#e557b70d736251fa0bbb7c4497852a92 -https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.1-h480ec47_8.conda#7d750f8e82a1b626b383b5039a3de0c7 -https://conda.anaconda.org/conda-forge/linux-64/git-2.41.0-pl5321h86e50cf_0.conda#14f8341e26b274362b026bbdc72b14fb -https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.31-pyhd8ed1ab_0.conda#f6e6b482110246a81c3f03e81c68752d +https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.1-h22adcc9_11.conda#514167b60f598eaed3f7a60e1dceb9ee +https://conda.anaconda.org/conda-forge/linux-64/git-2.42.0-pl5321h86e50cf_0.conda#96ad24c67e0056d171385859c43218a2 +https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.40-pyhd8ed1ab_0.conda#6bf74c3b7c13079a91d4bd3da51cefcf 
+https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-7.3.0-hdb3a94d_0.conda#765bc76c0dfaf24ff9d8a2935b2510df https://conda.anaconda.org/conda-forge/linux-64/hdfeos5-5.1.16-h8b5b2df_13.conda#29a96d50cb53638a5b4806b5ca6e4b1d -https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2023.1.23-py311hd374d05_2.conda#49daf4cf57e732fe804b5b63b60a65a6 -https://conda.anaconda.org/conda-forge/noarch/imageio-2.28.1-pyh24c5eb1_0.conda#ef3541a8cd9a55879932486a097b7fed -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-6.6.0-hd8ed1ab_0.conda#3cbc9615f10a3d471532b83e4250b971 -https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.17.3-pyhd8ed1ab_0.conda#723268a468177cd44568eb8f794e0d80 +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.0.0-hd8ed1ab_0.conda#12aff14f84c337be5e5636bf612f4140 +https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.11.2-pyhd8ed1ab_0.conda#73884ca36d6d96cbce498cde99fab40f https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.1-h3845be2_3.conda#f38e5e47f62d6633166040192ad420a1 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_hdf9a29f_104.conda#283aeeef04e2a01445156c9c2d5c4fa0 -https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.0.1-h7d1ca68_25.conda#c5ff4b64ee24804cad5ddb4239267b09 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.7.1-py311h8597a09_0.conda#70c3b734ffe82c16b6d121aaa11929a8 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.0.2-py311h320fe9a_0.conda#509769b430266dc5c2f6a3eab0f23164 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.5.3-pyhd8ed1ab_0.conda#c085a16ba3d0c9ee282c438308b57724 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h0f3d0bb_105.conda#b5d412441b84305460e9df8a016a3392 
+https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.0.1-hca56755_27.conda#918a735059cab21b96fc13a8d04fbcd8 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.2-py311h64a7726_0.conda#fd2f142dcd680413b5ede5d0fb799205 https://conda.anaconda.org/conda-forge/linux-64/poppler-23.05.0-hd18248d_1.conda#09e0de1aa7330fe697eed76eaeef666d https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.2.0-pyhd8ed1ab_0.tar.bz2#d883564cf1e9ba190f6b285036c5f949 https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.5.0-py311h1850bce_1.conda#572159a946b809df471b11db4995c708 -https://conda.anaconda.org/conda-forge/noarch/pytest-7.3.1-pyhd8ed1ab_0.conda#547c7de697ec99b494a28ddde185b5a4 -https://conda.anaconda.org/conda-forge/noarch/python-build-0.10.0-pyhd8ed1ab_1.conda#0ab47ce574f6a8bcb9f2076436e7fedb -https://conda.anaconda.org/conda-forge/noarch/rdflib-6.3.2-pyhd8ed1ab_0.conda#ef37f754e65328229ecf4488b5909b8d +https://conda.anaconda.org/conda-forge/noarch/pylint-2.17.7-pyhd8ed1ab_0.conda#3cab6aee60038b3f621bce3e50f52bed +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311ha169711_0.conda#ad4b6e9be79a89959bb6d7d308027ff2 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da +https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.3-pyhd8ed1ab_0.conda#1dbdf019d740419852c4a7803fff49d9 +https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.0.0-pyhd8ed1ab_1.conda#8bdcc0f401561213821bf67513abeeff +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.12.0-pyhd8ed1ab_0.conda#ac9fedc9a0c397f2318e82525491dd83 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.5.0-pyhd8ed1ab_0.conda#d5f595da2daead898ca958ac62f0307b +https://conda.anaconda.org/conda-forge/noarch/python-build-1.0.3-pyhd8ed1ab_0.conda#d9ccabf228cb98419ca3d5694b25e1a2 
+https://conda.anaconda.org/conda-forge/noarch/rdflib-7.0.0-pyhd8ed1ab_0.conda#44d14ef95495b3d4438f28998e0296a9 https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda#a30144e4156cdbb236f99ebb49828f8b https://conda.anaconda.org/conda-forge/noarch/requirements-detector-1.2.2-pyhd8ed1ab_0.conda#6626918380d99292df110f3c91b6e5ec +https://conda.anaconda.org/conda-forge/linux-64/suitesparse-5.10.1-h3ec001c_2.conda#9b37d27528c5f86ee09bf1fc6834da8e https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.13.2-hd532e3d_0.conda#6d97164f19dbd27575ef1899b02dc1e0 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h4dd048b_3.tar.bz2#dbfea4376856bf7bd2121e719cf816e5 +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311h9547e67_4.conda#586da7df03b68640de14dc3e8bcbf76f https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.14-h7f98852_1.tar.bz2#45b68dc2fc7549c16044d533ceaf340e -https://conda.anaconda.org/conda-forge/noarch/zarr-2.14.2-pyhd8ed1ab_0.conda#0c5776fe65a12a421d7ddf90411a6c3f -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.8.4-py311h459d7ec_1.conda#649386bf24f512a0593a83f59d2b7172 -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.2-he0fdcb3_0.conda#3d9577a30f0e61331216b381925aa3e3 -https://conda.anaconda.org/conda-forge/noarch/bokeh-3.1.1-pyhd8ed1ab_0.conda#07401431ba1c7fae695814ae3528312a +https://conda.anaconda.org/conda-forge/noarch/yapf-0.40.1-pyhd8ed1ab_0.conda#f269942e802d5e148632143d4c37acc9 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.20.3-he9c0e7f_4.conda#7695770e1d722ce9029a2ea30c060a3d https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.6.1-pyhd8ed1ab_0.conda#454ed214cec806066097ae245a409171 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.3-py311h1f0f07a_0.conda#b7e6d52b39e199238c3400cafaabafb3 https://conda.anaconda.org/conda-forge/noarch/chart-studio-1.1.0-pyh9f0ad1d_0.tar.bz2#acd9a12a35e5a0221bdf39eb6e4811dc 
-https://conda.anaconda.org/conda-forge/noarch/cmocean-3.0.3-pyhd8ed1ab_0.conda#eec7df83d725696d32c7bf99aff21d82 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.6.0-pyhd8ed1ab_0.conda#e2c66ccd8a5eedaddcb23739ed38ed27 +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.2.0-py311h9547e67_0.conda#40828c5b36ef52433e21f89943e09f33 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.12.0-pyhd8ed1ab_0.conda#95eae0785aed72998493140dc0115382 +https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.0-py_0.tar.bz2#6e166cd37cfeadefcfca1ffe00f222bb https://conda.anaconda.org/conda-forge/noarch/flake8-polyfill-1.0.2-py_0.tar.bz2#a53db35e3d07f0af2eccd59c2a00bffe -https://conda.anaconda.org/conda-forge/noarch/identify-2.5.24-pyhd8ed1ab_0.conda#a4085ab0562d5081a9333435837b538a +https://conda.anaconda.org/conda-forge/noarch/identify-2.5.33-pyhd8ed1ab_0.conda#93c8f8ceb83827d88deeba796f07fba7 +https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2023.8.12-py311h67b54e4_0.conda#363e5c2f2c67ff69d717aba54422b03d +https://conda.anaconda.org/conda-forge/noarch/imageio-2.31.5-pyh8c1a49c_0.conda#6820ccf6a3a27df348f18c85dd89014a https://conda.anaconda.org/conda-forge/linux-64/jasper-4.0.0-h32699f2_1.conda#fdde5424ecef5f7ad310b4242229291c +https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.20.0-pyhd8ed1ab_0.conda#1116d79def5268414fb0917520b2bbf1 https://conda.anaconda.org/conda-forge/linux-64/julia-1.8.5-h783901f_0.conda#98c05ba7ca9c15d22216f730499e167a -https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.3.0-py311h38be061_0.conda#1dd43a18a75d59206019e2a2a28555e5 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.7.0-he76be6c_0.conda#2e2c887d9a55b287982c1bf3d7013fb1 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 
+https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.0-pyhd8ed1ab_0.conda#6bd3f1069cdebb44c7ae9efb900e312d +https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.7.0-h5418a03_2.conda#30ddbe080c260fb36da8509e3fd6c45f +https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h4f3791c_100.conda#405c5b3ad4ef53eb0d93043b54206dd7 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h4d7c953_100.conda#c03492d0342e512e58aa2d6c5fdaaa91 +https://conda.anaconda.org/conda-forge/linux-64/numba-0.58.1-py311h96b013e_0.conda#06a0313ff3d2ec956a25767ccaf7c9f6 +https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.12.1-py311hb755f60_0.conda#38a2ff8ea433fe8792279b45e84b3730 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py311h320fe9a_0.conda#e44ccb61b6621bf3f8053ae66eba7397 https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-heaa33ce_1.conda#cde553e0e32389e26595db4eacf859eb -https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda#5936894aade8240c867d292aa0d980c6 -https://conda.anaconda.org/conda-forge/noarch/pylint-2.17.4-pyhd8ed1ab_0.conda#a9d97fe4617aba393d90ea81576b6b46 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.2.0-pyhd8ed1ab_1.conda#34f7d568bf59d18e3fef8c405cbece21 +https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.4-pyhd8ed1ab_0.conda#1184267eddebb57e47f8e1419c225595 +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.0-pyhd8ed1ab_0.conda#134b2b57b7865d2316a7cce1915a51ed +https://conda.anaconda.org/conda-forge/noarch/pylint-plugin-utils-0.7-pyhd8ed1ab_0.tar.bz2#1657976383aee04dbb3ae3bdf654bb58 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.3.0-pyhd8ed1ab_0.conda#7819533e674dbbc51468f3228b9b1bb6 https://conda.anaconda.org/conda-forge/noarch/pyroma-4.2-pyhd8ed1ab_0.conda#fe2aca9a5d4cb08105aefc451ef96950 
-https://conda.anaconda.org/conda-forge/noarch/pytest-cov-4.1.0-pyhd8ed1ab_0.conda#06eb685a3a0b146347a58dda979485da -https://conda.anaconda.org/conda-forge/noarch/pytest-env-0.8.1-pyhd8ed1ab_0.conda#56466a4061d4c1150f6fe52235019bf8 -https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.0.0-pyhd8ed1ab_1.conda#8bdcc0f401561213821bf67513abeeff -https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.10.0-pyhd8ed1ab_0.tar.bz2#db93caa9fe182f0cd20291aeb22f57ac -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.3.1-pyhd8ed1ab_0.conda#816073bb54ef59f33f0f26c14f88311b -https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.0.1-pyhd8ed1ab_0.conda#43ec7b3627237e5fe23413e314e8ba4c -https://conda.anaconda.org/conda-forge/noarch/sphinx-7.0.1-pyhd8ed1ab_0.conda#51a8d037b28276b4f68263e890e0f35b -https://conda.anaconda.org/conda-forge/linux-64/tempest-remap-2.1.6-hd5eb6f5_4.conda#57ce81f02413a1635553b5e4ab149d41 -https://conda.anaconda.org/conda-forge/noarch/tifffile-2023.4.12-pyhd8ed1ab_0.conda#b2ade33a630dada190c1220f3515fc5c -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.23.0-pyhd8ed1ab_0.conda#a920e114c4c2ced2280e266da65ab5e6 -https://conda.anaconda.org/conda-forge/noarch/xarray-2023.5.0-pyhd8ed1ab_0.conda#254b5553bed6adf404ac09fa07cb54da -https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.6-pyhd8ed1ab_0.tar.bz2#4409dd7e06a62c3b2aa9e96782c49c6d -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-h059227d_13.conda#16eac1f53808f188a44cb0dcb59b109b -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.8.1-pyhd8ed1ab_0.conda#3c3cdc59ff9c8e1f1c9d6d3c362ce778 -https://conda.anaconda.org/conda-forge/noarch/distributed-2023.6.0-pyhd8ed1ab_0.conda#4ec79a27574d70c947faf0a51bbe4079 +https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d 
+https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.4.1-py311h1f0f07a_1.conda#86b71ff85f3e4c8a98b5bace6d9c4565 +https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.1.1-pyhd8ed1ab_0.conda#29bf13210ee541c59166cea092b91080 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.4-py311h64a7726_0.conda#9ac5334f1b5ed072d3dbc342503d7868 +https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.4-py_1.tar.bz2#a9e101e1601faf5e5a119ab2bd7617a4 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py311h54d622a_1.conda#a894c65b48676c4973e9ee8b59bceb9e +https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-py_0.tar.bz2#cb83a3d6ecf73f50117635192414426a +https://conda.anaconda.org/conda-forge/linux-64/tempest-remap-2.2.0-h43474b4_0.conda#fd815765a86daf44db1e15c6f6edf5e6 +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.10.57-hbc2ea52_17.conda#452c7b08c21eea2ef01f4fd364d6affc +https://conda.anaconda.org/conda-forge/noarch/bokeh-3.3.2-pyhd8ed1ab_0.conda#c02a7e79365121bd3bcc25f1b65f48a9 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h1f0f07a_4.conda#1e105c1a8ea2163507726144b401eb1b +https://conda.anaconda.org/conda-forge/noarch/distributed-2023.12.0-pyhd8ed1ab_0.conda#22d620e1079e99c34578cb0c615d2789 https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.30.2-h1f30a5c_0.conda#21ee8444a7f629924ea8cfe52a622cbd https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h20110ff_0.conda#11f5169aeff54ad7277476be8ba19ff7 -https://conda.anaconda.org/conda-forge/linux-64/gdal-3.7.0-py311h6122507_0.conda#f45a9655a8fd47b8187ae00972f4b4e7 +https://conda.anaconda.org/conda-forge/linux-64/gdal-3.7.0-py311h281082f_2.conda#fde4fad3c517cc80f32995696f45198d https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.2.0-pyhd8ed1ab_0.conda#58ca2d50c3b27b86fd7df62eaadbf9a9 
-https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.0-h5cef280_0.conda#1ec4fab6eb4af1db9056b94265fe19cf -https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.0-pyhd8ed1ab_2.tar.bz2#363b0816e411feb0df925d4f224f026a -https://conda.anaconda.org/conda-forge/noarch/nbformat-5.9.0-pyhd8ed1ab_0.conda#f525a01528c3eba1d381a232a6971c6a +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-h98fae49_0.conda#620e754f4344f4c27259ff460a2b9c50 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.8.2-py311h54ef318_0.conda#9f80753bc008bfc9b95f39d9ff9f1694 +https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 +https://conda.anaconda.org/conda-forge/noarch/nbformat-5.9.2-pyhd8ed1ab_0.conda#61ba076de6530d9301a0053b02f093d2 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.6.4-nompi_py311h4d7c953_100.conda#c03492d0342e512e58aa2d6c5fdaaa91 https://conda.anaconda.org/conda-forge/noarch/pep8-naming-0.10.0-pyh9f0ad1d_0.tar.bz2#b3c5536e4f9f58a4b16adb6f1e11732d -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.3.2-pyha770c72_0.conda#dbb0111b18ea5c9983fb8db0aef6000b -https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.4.3-py311h38be061_1.tar.bz2#f0c9a1067c03e8f05e53ef0c5ad5fab3 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.13.3-pyhd8ed1ab_0.conda#07aca5f2dea315dcc16680d6891e9056 -https://conda.anaconda.org/conda-forge/noarch/pylint-plugin-utils-0.7-pyhd8ed1ab_0.tar.bz2#1657976383aee04dbb3ae3bdf654bb58 -https://conda.anaconda.org/conda-forge/noarch/pytest-html-3.2.0-pyhd8ed1ab_1.tar.bz2#d5c7a941dfbceaab4b172a56d7918eb0 -https://conda.anaconda.org/conda-forge/noarch/pytest-json-report-1.5.0-pyhd8ed1ab_0.tar.bz2#837e335fa428cf7c784ee2e80594506c -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_0.conda#3a00b1b08d8c01b1a3bfa686b9152df2 
-https://conda.anaconda.org/conda-forge/linux-64/r-base-4.1.3-h0fc540b_8.conda#4c7a8c23a6be7cb6385ed2035fe147b8 -https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.7-py311h138ec3c_1.conda#1baa5b82f2a746b7163a0b17e89439ff -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.10.1-py311h64a7726_3.conda#a01a3a7428e770db5a0c8c7ab5fce7f7 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.21.1-py311hd88b842_1.conda#f19feb9440890ccb806a367ea9ae0654 -https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyh1a96a4e_2.tar.bz2#64068564a9c2932bf30e9b4ec567927d -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_1.conda#4067029ad6872d49f6d43c05dd1f51a9 -https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.4-py311hbac4ec9_0.conda#1d3445f5f7fa002a1c149c405376f012 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.0.5-h28d9a01_0.conda#597e2d0e1c6bc2e4457714ff479fe142 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-12.0.0-hed73b3e_7_cpu.conda#b7751e77fef9bec5135f4a6cb3ceb7b3 -https://conda.anaconda.org/conda-forge/linux-64/magics-4.13.0-h8ea9e15_4.conda#3f714a7ce0c1c9a6195d89d0792381a4 -https://conda.anaconda.org/conda-forge/noarch/nbclient-0.8.0-pyhd8ed1ab_0.conda#e78da91cf428faaf05701ce8cc8f2f9b -https://conda.anaconda.org/conda-forge/linux-64/ncl-6.6.2-hcf71a85_46.conda#5b95cd07906b1eabe73058f69a1b7e16 -https://conda.anaconda.org/conda-forge/linux-64/nco-5.1.6-hd62b316_0.conda#af7780f76ee37325d264327e21a478f5 -https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.3-pyhd8ed1ab_0.tar.bz2#50ef6b29b1fb0768ca82c5aeb4fb2d96 -https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.4.1-py311h38be061_2.tar.bz2#4c9101d329f6bc09c2617a80e3eb9c89 -https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.5.2-pyhd8ed1ab_0.conda#1de2b64c99d5b4e8413823047c0dbf7c +https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.6.0-pyha770c72_0.conda#473a7cfca197da0a10cff3f6dded7d4b 
https://conda.anaconda.org/conda-forge/noarch/pylint-celery-0.3-py_1.tar.bz2#e29456a611a62d3f26105a2f9c68f759 https://conda.anaconda.org/conda-forge/noarch/pylint-django-2.5.3-pyhd8ed1ab_0.tar.bz2#00d8853fb1f87195722ea6a582cc9b56 https://conda.anaconda.org/conda-forge/noarch/pylint-flask-0.6-py_0.tar.bz2#5a9afd3d0a61b08d59eed70fab859c1b +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h1f0f07a_1.conda#cd36a89a048ad2bcc6d8b43f648fb1d0 +https://conda.anaconda.org/conda-forge/linux-64/r-base-4.1.3-hfabd6f2_9.conda#0ab4cf54fbddc0cc9ff260c6f77f8c84 +https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.8-py311h41e4db2_0.conda#b35deb26af1d7e0d98438c8ac5c6b7b2 +https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.3.2-py311hc009520_2.conda#9821f8e497a791858226f535e5e0be62 +https://conda.anaconda.org/conda-forge/noarch/sparse-0.14.0-pyhd8ed1ab_0.conda#ee01b310177a0612554b9d20e537fdbe +https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.0-py311h1f0f07a_2.conda#8f1e772e5430ce48229740ec00a90b61 +https://conda.anaconda.org/conda-forge/noarch/tifffile-2023.12.9-pyhd8ed1ab_0.conda#454bc0aff84f35fa53ba9e0369737a9b +https://conda.anaconda.org/conda-forge/noarch/xarray-2023.12.0-pyhd8ed1ab_0.conda#e9b31d3ab1b0dd5fd8c24419f6560b90 +https://conda.anaconda.org/conda-forge/noarch/zarr-2.16.1-pyhd8ed1ab_0.conda#59ec835edbee50266b7bdbadab7ba335 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.22.0-py311h320fe9a_1.conda#10d1806e20da040c58c36deddf51c70c +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.8.6-pyhd8ed1ab_0.conda#2e33e3bdf2b1a79989ad792ac22104d3 +https://conda.anaconda.org/conda-forge/noarch/cmocean-3.0.3-pyhd8ed1ab_0.conda#eec7df83d725696d32c7bf99aff21d82 +https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.8.2-pyhd8ed1ab_0.conda#cc344a296a41369bcb05f7216661cec8 +https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhca7485f_3.conda#1d43833138d38ad8324700ce45a7099a 
+https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 +https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.4-py311hbac4ec9_0.conda#1d3445f5f7fa002a1c149c405376f012 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb +https://conda.anaconda.org/conda-forge/linux-64/libarrow-12.0.1-h657c46f_7_cpu.conda#4de6e12428b7018f1f8a1e8dda555243 +https://conda.anaconda.org/conda-forge/linux-64/magics-4.14.2-hd3d5bb6_0.conda#3c571b994b6ce2b4d2c7b98be77a8ebe +https://conda.anaconda.org/conda-forge/noarch/nbclient-0.8.0-pyhd8ed1ab_0.conda#e78da91cf428faaf05701ce8cc8f2f9b +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/linux-64/ncl-6.6.2-hf70af60_47.conda#ee27133164cb9f5e74681bdb8839688f +https://conda.anaconda.org/conda-forge/linux-64/nco-5.1.6-hd62b316_0.conda#af7780f76ee37325d264327e21a478f5 +https://conda.anaconda.org/conda-forge/noarch/prospector-1.10.3-pyhd8ed1ab_0.conda#f551d4d859a1d70c6abff8310a655481 +https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.4.3-py311h38be061_1.tar.bz2#f0c9a1067c03e8f05e53ef0c5ad5fab3 +https://conda.anaconda.org/conda-forge/linux-64/py-xgboost-1.7.6-cuda118_py311h0be3a32_6.conda#e9989e03af742084940a11c7c3c395a5 https://conda.anaconda.org/conda-forge/noarch/r-abind-1.4_5-r41hc72bb7e_1004.tar.bz2#831186670e5786df30f8ddeb5a623c5a https://conda.anaconda.org/conda-forge/linux-64/r-backports-1.4.1-r41h06615bd_1.tar.bz2#9a00c3283f8fb4bce68deffe08fbe09d https://conda.anaconda.org/conda-forge/noarch/r-bigmemory.sri-0.1.6-r41hc72bb7e_0.tar.bz2#926471a5be30d287a25f2d10446d6066 @@ -559,21 +557,18 @@ https://conda.anaconda.org/conda-forge/noarch/r-withr-2.5.0-r41hc72bb7e_1.tar.bz https://conda.anaconda.org/conda-forge/linux-64/r-xfun-0.39-r41ha503ecb_0.conda#555ee06849209b9471946da6f09bb98b 
https://conda.anaconda.org/conda-forge/noarch/r-xmlparsedata-1.0.5-r41hc72bb7e_1.tar.bz2#921c0ef7104d8df0ab506f1bb81a062c https://conda.anaconda.org/conda-forge/linux-64/r-yaml-2.3.7-r41h133d619_0.conda#4af88071a607237aa73a3cbd51788a39 -https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.20.0-py311h2872171_1.conda#24fca64735554fdf2794c69f5b3d9a06 -https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.2.2-py311hc009520_2.conda#538b903a5572cf48fb87c8d30fc06e0d -https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.12.2-pyhd8ed1ab_0.conda#cf88f3a1c11536bc3c10c14ad00ccc42 -https://conda.anaconda.org/conda-forge/noarch/sparse-0.14.0-pyhd8ed1ab_0.conda#ee01b310177a0612554b9d20e537fdbe -https://conda.anaconda.org/conda-forge/linux-64/cdo-2.2.0-h3667792_2.conda#9c6e6832df36787cac3675bbf1fd786b -https://conda.anaconda.org/conda-forge/linux-64/imagemagick-7.1.1_9-pl5321hfda792c_0.conda#9332cec0a5c049edb47dacc9a8b2f209 -https://conda.anaconda.org/conda-forge/noarch/iris-3.6.0-pyha770c72_0.conda#a213bee1c2fab6f99c4f66ef5f0b34f9 +https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.22.0-py311h320fe9a_2.conda#e94b7f09b52628b89e66cdbd8c3029dd +https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.0-pyhd8ed1ab_0.conda#082666331726b2438986cfe33ae9a8ee +https://conda.anaconda.org/conda-forge/linux-64/cdo-2.2.0-he026af2_4.conda#6c00b0a21b3de8a149eee137e83465d3 +https://conda.anaconda.org/conda-forge/linux-64/imagemagick-7.1.1_15-pl5321hf48ede7_0.conda#53c9f7169b61e615d5f41c8d70a72c00 +https://conda.anaconda.org/conda-forge/noarch/iris-3.7.0-pyha770c72_0.conda#dccc1f660bf455c239adaabf56b91dc9 https://conda.anaconda.org/conda-forge/noarch/lime-0.2.0.1-pyhd8ed1ab_1.tar.bz2#789ce01416721a5533fb74aa4361fd13 https://conda.anaconda.org/conda-forge/noarch/mapgenerator-1.0.7-pyhd8ed1ab_0.conda#d18db96ef2a920b0ecefe30282b0aecf -https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.4.0-pyhd8ed1ab_0.conda#4456e6030a8309bdad57569b0170b6a3 
-https://conda.anaconda.org/conda-forge/noarch/prospector-1.10.2-pyhd8ed1ab_0.conda#2c536985982f7e531df8d640f554008a -https://conda.anaconda.org/conda-forge/noarch/psy-maps-1.4.2-pyhd8ed1ab_0.tar.bz2#3ed13103dfd46f71dc870d188bd0b276 -https://conda.anaconda.org/conda-forge/linux-64/py-xgboost-1.7.4-cuda111py311h82c1ec6_2.conda#15e4e8d751e4740367631c17f460ad58 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-12.0.0-py311hdf9aeb4_7_cpu.conda#280733071f7aadc4d6c8c77c22e2816d -https://conda.anaconda.org/conda-forge/linux-64/pydot-1.4.2-py311h38be061_3.tar.bz2#64a77de29fde80aef5013ddf5e62a564 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.12.0-pyhd8ed1ab_0.conda#4d67c68fd0d130091ada039bc2d81b33 +https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.4.1-py311h38be061_2.tar.bz2#4c9101d329f6bc09c2617a80e3eb9c89 +https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.6.6-pyhd8ed1ab_0.conda#255f9eac03143526c8aed41d1d091c63 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-12.0.1-py311h39c9aba_7_cpu.conda#d513ab8d10ec5f3ee45b419c836195ec +https://conda.anaconda.org/conda-forge/linux-64/pydot-1.4.2-py311h38be061_4.conda#5c223cb0d9c05552bf9d1586a92720b2 https://conda.anaconda.org/conda-forge/linux-64/r-askpass-1.1-r41h06615bd_3.tar.bz2#c8ec8683302ad9a2345cb31ab28e6c6b https://conda.anaconda.org/conda-forge/linux-64/r-bigmemory-4.6.1-r41h7525677_1.tar.bz2#6a956b57b027b49b7a9ca48031a8bbd6 https://conda.anaconda.org/conda-forge/linux-64/r-checkmate-2.2.0-r41h57805ef_0.conda#dc314ad76563387e70e0117c5398a15a @@ -598,14 +593,16 @@ https://conda.anaconda.org/conda-forge/noarch/r-rprojroot-2.0.3-r41hc72bb7e_1.ta https://conda.anaconda.org/conda-forge/linux-64/r-sp-1.6_1-r41h57805ef_0.conda#b7943adfe3494b4c4dc8e3b58fc6602d https://conda.anaconda.org/conda-forge/linux-64/r-spam-2.9_1-r41hb20cf53_1.conda#9eab4a6bfff4bddeee5ed946c47830fa 
https://conda.anaconda.org/conda-forge/linux-64/r-timechange-0.2.0-r41h38f115c_0.conda#04a4229419d779a1e27395d70d493571 -https://conda.anaconda.org/conda-forge/linux-64/r-xml2-1.3.4-r41h096396e_0.conda#8745024d295023466489aadcb3412a57 +https://conda.anaconda.org/conda-forge/linux-64/r-xml2-1.3.4-r41h1ad5fc0_1.conda#82c1446591783493d65273a158e8ce28 https://conda.anaconda.org/conda-forge/linux-64/r-zoo-1.8_12-r41h133d619_0.conda#1d432d2eba171727afd03507faa5e2f6 -https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.0-py311h1f0f07a_1.conda#a1daa39fa0bfed4d91a3640c2274034a -https://conda.anaconda.org/conda-forge/noarch/xesmf-0.7.1-pyhd8ed1ab_0.conda#3a9cc63d7bcbc8d738a0e92faf8b6c07 -https://conda.anaconda.org/conda-forge/noarch/dask-2023.6.0-pyhd8ed1ab_0.conda#187668ed10c12ad03aded53bc8e7aee6 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.4.0-pyhd8ed1ab_0.conda#127c702e1b1eff595be82bc6a78cfce0 +https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.0-hd8ed1ab_0.conda#ebd31a95a7008b7e164dad9dbbb5bb5a +https://conda.anaconda.org/conda-forge/noarch/xesmf-0.8.2-pyhd8ed1ab_0.conda#8e765a0eca0ce1cfa889cd9af82a23a8 +https://conda.anaconda.org/conda-forge/linux-64/xgboost-1.7.6-cuda118_py311h0be3a32_6.conda#f061993f8ed8ca90d2bb6a547a47109f +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.12.0-pyhd8ed1ab_0.conda#460d7cac50322a39b61a833885a6a8d5 https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 +https://conda.anaconda.org/conda-forge/noarch/psy-maps-1.4.2-pyhd8ed1ab_0.tar.bz2#3ed13103dfd46f71dc870d188bd0b276 https://conda.anaconda.org/conda-forge/linux-64/psy-reg-1.4.0-py311h38be061_3.conda#6f7871722c07922028043144e8873b37 +https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda#ccc06e6ef2064ae129fab3286299abda https://conda.anaconda.org/conda-forge/noarch/python-cdo-1.6.0-pyhd8ed1ab_0.conda#3fd1a0b063c1fbbe4b7bd5a5a7601e84 
https://conda.anaconda.org/conda-forge/linux-64/r-akima-0.6_2.3-r41h92ddd45_0.tar.bz2#bac0b7627ef744c98f4bc48885f52e72 https://conda.anaconda.org/conda-forge/noarch/r-callr-3.7.3-r41hc72bb7e_0.tar.bz2#af0891cc9b87e2954c9a3c66f144992d @@ -621,13 +618,11 @@ https://conda.anaconda.org/conda-forge/linux-64/r-openssl-2.0.6-r41habfbb5e_0.co https://conda.anaconda.org/conda-forge/noarch/r-r.utils-2.12.2-r41hc72bb7e_0.tar.bz2#302c316e29b7f426fa2de6f1f21dec75 https://conda.anaconda.org/conda-forge/linux-64/r-reshape-0.8.9-r41hc72bb7e_1.tar.bz2#acdda9b65715d9b2d7f928145605d283 https://conda.anaconda.org/conda-forge/noarch/r-scales-1.2.1-r41hc72bb7e_1.tar.bz2#2a557fcc9f60e56e788a6d1293bc8701 -https://conda.anaconda.org/conda-forge/linux-64/r-specsverification-0.5_3-r41h7525677_2.tar.bz2#4f29fc17a4ca578035f136f4724cfe46 +https://conda.anaconda.org/conda-forge/linux-64/r-specsverification-0.5_3-r41ha503ecb_3.conda#2bc51f0d44b98092ba57cf2f8671b490 https://conda.anaconda.org/conda-forge/linux-64/r-splancs-2.01_43-r41h8da6f51_1.tar.bz2#3a6aad0706541141d10e3b514467a080 https://conda.anaconda.org/conda-forge/linux-64/r-vctrs-0.6.2-r41ha503ecb_0.conda#1f7610a1863648cab254a9f85bd29dcd -https://conda.anaconda.org/conda-forge/noarch/seaborn-0.12.2-hd8ed1ab_0.conda#50847a47c07812f88581081c620f5160 -https://conda.anaconda.org/conda-forge/linux-64/xgboost-1.7.4-cuda111py311h569739f_2.conda#6c06902f15128dad7e0b26a3c83e6f43 -https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.7.0-pyhd8ed1ab_0.conda#de82eb8d09362babacafe6b7e27752ac -https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.4.0-pyhd8ed1ab_0.conda#a86727968b41c20dd3d73b91632e77dc +https://conda.anaconda.org/conda-forge/noarch/dask-2023.12.0-pyhd8ed1ab_0.conda#5cfc00e93b71fba459bede86419d0f01 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.12.0-pyhd8ed1ab_0.conda#364e28ab12477494e72839aaa588073d 
https://conda.anaconda.org/conda-forge/noarch/r-cyclocomp-1.1.0-r41hc72bb7e_1005.tar.bz2#800e1da5bf774be48934b8865dd78d33 https://conda.anaconda.org/conda-forge/noarch/r-gridextra-2.3-r41hc72bb7e_1004.tar.bz2#71ebed7e976df735ff3443bb88bd154f https://conda.anaconda.org/conda-forge/noarch/r-httr-1.4.6-r41hc72bb7e_0.conda#53dbb769c96782db54bf2d414fc9b239 @@ -637,10 +632,10 @@ https://conda.anaconda.org/conda-forge/noarch/r-pillar-1.9.0-r41hc72bb7e_0.conda https://conda.anaconda.org/conda-forge/noarch/r-pkgload-1.3.2-r41hc72bb7e_0.tar.bz2#e23a1a8420ab52056d86a6f9691d23fa https://conda.anaconda.org/conda-forge/linux-64/r-purrr-1.0.1-r41h133d619_0.conda#d7404238cac0da3c97dc08503d116a2f https://conda.anaconda.org/conda-forge/noarch/r-r.cache-0.16.0-r41hc72bb7e_1.tar.bz2#aef451160d655cc630d8038d934dced3 -https://conda.anaconda.org/conda-forge/noarch/esmvalcore-2.8.1-pyhd8ed1ab_0.conda#d3a13d0cb05e7e9a7ae03d8e54e4fc16 -https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.2-pyhd8ed1ab_0.conda#d1212b423fdd10d2da59601385561ff7 +https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.9.0-pyhd8ed1ab_0.conda#570f2c6e387fd6dac5356a5152f91b3f https://conda.anaconda.org/conda-forge/noarch/r-climprojdiags-0.3.2-r41hc72bb7e_0.conda#9922b863cd10035cbb75e3c2edae64a7 https://conda.anaconda.org/conda-forge/linux-64/r-tibble-3.2.1-r41h133d619_1.conda#3ae9b78fb1d8a44deed24a27cce33ebf +https://conda.anaconda.org/conda-forge/label/esmvalcore_rc/noarch/esmvalcore-2.10.0rc1-pyh39db41b_0.conda#b973ee8c35712a7d21830ed06bdbc42d https://conda.anaconda.org/conda-forge/noarch/r-ggplot2-3.4.2-r41hc72bb7e_0.conda#c2b04f4ff351d84bf51fd5a77b5c9b6c https://conda.anaconda.org/conda-forge/noarch/r-rematch2-2.1.2-r41hc72bb7e_2.tar.bz2#f67eae0562ffc808b82f1590776c25f5 https://conda.anaconda.org/conda-forge/noarch/r-styler-1.10.1-r41hc72bb7e_0.conda#c12b81cff8bb8745ffbe7aeb9dfd795f @@ -653,3 +648,12 @@ https://conda.anaconda.org/conda-forge/linux-64/r-testthat-3.1.8-r41ha503ecb_0.c 
https://conda.anaconda.org/conda-forge/linux-64/r-geomap-2.5_0-r41h06615bd_1.tar.bz2#dabe8f942d619075cbd13c1481c8b538 https://conda.anaconda.org/conda-forge/noarch/r-lintr-3.0.2-r41hc72bb7e_0.tar.bz2#769c2305486b74fd61d85bfef2296f27 https://conda.anaconda.org/conda-forge/noarch/r-s2dverification-2.10.3-r41hc72bb7e_1.tar.bz2#2253f130c8dab435824d6ddb10a41c73 +https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.6-pyhd8ed1ab_0.tar.bz2#4409dd7e06a62c3b2aa9e96782c49c6d +https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.3-pyhd8ed1ab_0.conda#0dbaa7d08d3d79b2a1a4dd6a02cc4581 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.14.4-pyhd8ed1ab_0.conda#c79b8443908032263ffb40ee6215e9e4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.7-pyhd8ed1ab_0.conda#aebfabcb60c33a89c1f9290cab49bc93 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.5-pyhd8ed1ab_0.conda#ebf08f5184d8eaa486697bc060031953 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.4-pyhd8ed1ab_0.conda#a9a89000dfd19656ad004b937eeb6828 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.6-pyhd8ed1ab_0.conda#cf5c9649272c677a964a7313279e3a9b +https://conda.anaconda.org/conda-forge/noarch/sphinx-7.2.6-pyhd8ed1ab_0.conda#bbfd1120d1824d2d073bc65935f0e4c0 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.9-pyhd8ed1ab_0.conda#0612e497d7860728f2cda421ea2aec09 diff --git a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst index bb43997c43..3b1e3e6548 100644 --- a/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst +++ b/doc/sphinx/source/api/esmvaltool.diag_scripts.monitor.rst @@ -11,6 +11,7 @@ Examples -------- * :ref:`recipe_monitor` +* :ref:`recipe_model_evaluation` Diagnostic scripts diff --git a/doc/sphinx/source/changelog.rst b/doc/sphinx/source/changelog.rst index e26ec7d26b..52c3aa3086 100644 
--- a/doc/sphinx/source/changelog.rst +++ b/doc/sphinx/source/changelog.rst @@ -3,6 +3,113 @@ Changelog ========= +.. _changelog-v2-9-0: + +v2.9.0 +------ + +Highlights +~~~~~~~~~~ + +- A new :ref:`diagnostic ` has been + added to provide a high-level interface to + `seaborn `__, + a Python data visualization library based on + `matplotlib `__. + See the :ref:`recipe documentation ` for more + information. + +- We have included a new recipe and diagnostic that represent the major + physical processes that describe Arctic-midlatitude teleconnections and + provide the basis for the CMIP6 model evaluation for the further application + of causal discovery. + The results are discussed in the article + `"Causal model evaluation of Arctic-midlatitude teleconnections in CMIP6" `__ + by Galytska et al. (in review in Journal of Geophysical Research: Atmospheres). + +- It is now possible to use the + `Dask distributed scheduler `__, + which can + `significantly reduce the run-time of recipes `__. + Configuration examples and advice are available in the + :ref:`ESMValCore documentation `. + If configured, the Dask distributed scheduler will also be used by diagnostic + scripts written in Python, so make sure to use + `lazy data `__ + wherever it is possible in your (new) diagnostics. + More work on improving the computational performance is planned, so please + share your experiences, good and bad, with this new feature in + `ESMValGroup/ESMValCore#1763 `__. 
+ +This release includes + +Bug fixes +~~~~~~~~~ + +- Fixed usage of ``work_dir`` in some CMORizer scripts (`#3192 `__) `Rémi Kazeroni `__ +- Realize data for scalar cube in `recipe_carvalhais14nat` to avert issue from dask latest (2023.6.0) (`#3265 `__) `Valeriu Predoi `__ +- Fix failing ``mlr`` diagnostic test by adding new scikit-learn default tag (`#3273 `__) `Rémi Kazeroni `__ +- Fix ordering of models in perfmetrics diagnostic script (`#3275 `__) `Lisa Bock `__ + +Documentation +~~~~~~~~~~~~~ + +- Update release schedule after v2.8.0 (`#3138 `__) `Rémi Kazeroni `__ +- Added reference entry for Winterstein (`#3154 `__) `FranziskaWinterstein `__ +- Show logo on PyPI (`#3185 `__) `Valeriu Predoi `__ +- Add Release Managers for v2.9.0 and v2.10.0 (`#3184 `__) `Rémi Kazeroni `__ +- Fix readthedocs build with esmpy>=8.4.0 and missing ESMFMKFILE variable (`#3205 `__) `Valeriu Predoi `__ +- Add ESMValCore release v2.8.1 into the documentation (`#3235 `__) `Rémi Kazeroni `__ +- Modified links to the tutorial (`#3236 `__) `Rémi Kazeroni `__ +- Fix gitter badge in README (`#3258 `__) `Rémi Kazeroni `__ +- Add release notes for v2.9.0 (`#3266 `__) `Bouwe Andela `__ + +Diagnostics +~~~~~~~~~~~ + +- New plot_type 1d_profile in monitor (`#3178 `__) `FranziskaWinterstein `__ +- Add Seaborn diagnostic (`#3155 `__) `Manuel Schlund `__ +- New recipe and diagnostic for Arctic-midlatitude research (`#3021 `__) `Evgenia Galytska `__ +- Generate climatology on the fly for AutoAssess soil moisture (`#3197 `__) `Alistair Sellar `__ +- Remove "fx_variables" from recipe_tebaldi21esd.yml (`#3211 `__) `Birgit Hassler `__ +- Remove "fx_variables" from ipccwg1ar5ch9 recipes (`#3215 `__) `katjaweigel `__ +- Remove "fx_variables" from recipe_wenzel14jgr.yml (`#3212 `__) `Birgit Hassler `__ +- Update obs4MIPs dataset to the current naming scheme in recipe_smpi.yml (`#2991 `__) `Bouwe Andela `__ +- Fixed pandas diagnostics for pandas>=2.0.0 (`#3209 `__) `Manuel Schlund `__ +- Update 
recipe_impact.yml to work with newer versions of `pandas` (`#3220 `__) `Bouwe Andela `__ +- Add variable long names to provenance record in monitoring diagnostics (`#3222 `__) `Brei Soliño `__ + +Observational and re-analysis dataset support +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +- Add CMORizer for GPCP-SG (pr) (`#3150 `__) `FranziskaWinterstein `__ +- Extension of NASA MERRA2 CMORizer (cl, cli, clivi, clw, clwvi) (`#3167 `__) `Axel Lauer `__ + +Automatic testing +~~~~~~~~~~~~~~~~~ + +- Add a CircleCI-testing-specific ``recipe_python_for_CI.yml`` to avoid calling geolocator/Nominatim over CI (`#3159 `__) `Valeriu Predoi `__ +- Check if Python minor version changed after Julia install in development installation test (`#3213 `__) `Valeriu Predoi `__ +- Fix tests using deprecated ``esmvalcore._config`` module that has been removed in ESMValCore v2.9 (`#3204 `__) `Valeriu Predoi `__ + +Installation +~~~~~~~~~~~~ + +- Add support for Python=3.11 (`#3173 `__) `Valeriu Predoi `__ +- Drop python=3.8 support (`#3193 `__) `Valeriu Predoi `__ +- Repair generation of conda lock files (`#3148 `__) `Valeriu Predoi `__ +- Modernize lock creation script and repair lock generation (`#3174 `__) `Valeriu Predoi `__ +- Pin numpy !=1.24.3 due to severe masking bug (`#3182 `__) `Valeriu Predoi `__ +- Update xesmf to versions >= 0.4.0 (`#2728 `__) `Klaus Zimmermann `__ +- Update esmpy import for ESMF version 8.4.0 or larger (`#3188 `__) `Valeriu Predoi `__ +- Relax the pin on iris to allow the use of older versions for performance reasons (`#3270 `__) `Bouwe Andela `__ +- Use ESMValCore v2.9.0 (`#3274 `__) `Bouwe Andela `__ + +Improvements +~~~~~~~~~~~~ + +- Update pre-commit hooks (`#3189 `__) `Bouwe Andela `__ +- Add support for using a dask distributed scheduler (`#3151 `__) `Bouwe Andela `__ .. _changelog-v2-8-0: @@ -19,7 +126,7 @@ Highlights climatologies from CMIP models as used in `Lauer et al. (2023), J. Climate `__. 
See :ref:`recipe documentation ` about added recipes. -- Addition of a set of recipes for extreme events, regional and impact +- Addition of a set of recipes for extreme events, regional and impact evaluation as used in `Weigel et al. (2021), J. Climate `__ and in IPCC AR5. See :ref:`recipe documentation ` about added recipes. @@ -265,7 +372,7 @@ Documentation - Update documentation for the Landschuetzer 2016 recipe. (`#2801 `__) `Tomas Torsvik `__ - Fixed anaconda badge in README (`#2866 `__) `Valeriu Predoi `__ - Update release strategy notes (`#2734 `__) `sloosvel `__ -- Add documention on how to handle CMORizers for multiple dataset versions (`#2730 `__) `Rémi Kazeroni `__ +- Add documentation on how to handle CMORizers for multiple dataset versions (`#2730 `__) `Rémi Kazeroni `__ - Extending documentation: recipe maintainer + broken recipe policy (`#2719 `__) `Axel Lauer `__ Diagnostics @@ -310,8 +417,8 @@ Highlights ~~~~~~~~~~ - A new monitoring diagnostic has been added to allow the comparison of model runs against reference datasets. For details, see :ref:`Monitoring diagnostic to show multiple datasets in one plot (incl. biases) `. -- A tool has been developed to compare the output of recipe runs against previous runs, in order to detect in an automatized way breaking changes between releases. Find more information in :ref:`Comparing recipe runs `. -- The recipe :ref:`Climate Change Hotspot ` allows to compute hotspots in any rectangular region. +- A tool has been developed to compare the output of recipe runs against previous runs, in order to detect in an automated way breaking changes between releases. Find more information in :ref:`Comparing recipe runs `. +- The recipe :ref:`Climate Change Hotspot ` allows to compute hotspots in any rectangular region. Please also note the highlights from the corresponding ESMValCore release :ref:`here`. 
Thanks to that ESMValTool has gained the following features: @@ -418,7 +525,7 @@ Thanks to that ESMValTool has gained the following features: - The new preprocessor ``extract_location`` can extract arbitrary locations on the Earth. - Time ranges can now be extracted using the `ISO 8601 format `_. -- The new preprocessor ``ensemble_statistics`` can calculate arbitrary statitics over all ensemble members of a simulation. +- The new preprocessor ``ensemble_statistics`` can calculate arbitrary statistics over all ensemble members of a simulation. This release includes diff --git a/doc/sphinx/source/community/code_documentation.rst b/doc/sphinx/source/community/code_documentation.rst index 82f8c3a8f7..1c211daf39 100644 --- a/doc/sphinx/source/community/code_documentation.rst +++ b/doc/sphinx/source/community/code_documentation.rst @@ -442,7 +442,10 @@ name to the list of authors in ``CITATION.cff`` and generate the entry for the :: pip install cffconvert - cffconvert --format zenodo --outfile .zenodo.json + cffconvert --infile CITATION.cff --format zenodo --outfile .zenodo.json + +Presently, this method unfortunately discards entries `communities` +and `grants` from that file; please restore them manually. Note that authors of recipes and/or diagnostics also need to be added to the file `esmvaltool/config-references.yml `__, diff --git a/doc/sphinx/source/community/dataset.rst b/doc/sphinx/source/community/dataset.rst index d70438d9bc..424d4d4694 100644 --- a/doc/sphinx/source/community/dataset.rst +++ b/doc/sphinx/source/community/dataset.rst @@ -65,10 +65,10 @@ The scientific reviewer needs to check this. Data availability ================= -Once your pull request has been approved by the reviewers, ask -`@remi-kazeroni `_ +Once your pull request has been approved by the reviewers, ask a member of +`@OBS-maintainers `_ to add the new dataset to the data pool at DKRZ and CEDA-Jasmin. -He is also the person in charge of merging CMORizer pull requests. 
+This team is in charge of merging CMORizer pull requests. .. _dataset_checklist: @@ -165,14 +165,14 @@ Run ``esmvaltool/recipes/examples/recipe_check_obs.yml`` for new dataset. RAW data -------- -Contact person in charge of ESMValTool data pool (`@remi-kazeroni`_) and +Contact the team in charge of ESMValTool data pool (`@OBS-maintainers`_) and request to copy RAW data to RAWOBS/Tier2 (Tier3). CMORized data ------------- -Contact person in charge of ESMValTool data pool (`@remi-kazeroni`_) and +Contact the team in charge of ESMValTool data pool (`@OBS-maintainers`_) and request to * Merge the pull request diff --git a/doc/sphinx/source/community/diagnostic.rst b/doc/sphinx/source/community/diagnostic.rst index 641a2da892..285815f7cf 100644 --- a/doc/sphinx/source/community/diagnostic.rst +++ b/doc/sphinx/source/community/diagnostic.rst @@ -179,6 +179,10 @@ and finally it will store provenance information. Provenance information is stor and provided that the provenance tree is small, also plotted in an SVG file for human inspection. In addition to provenance information, a caption is also added to the plots. + +Provenance information from the recipe is automatically recorded by ESMValCore, whereas +diagnostic scripts must include code specifically to record provenance. See below for +documentation of provenance attributes that can be included in a recipe. When contributing a diagnostic, please make sure it records the provenance, and that no warnings related to provenance are generated when running the recipe. To allow the ESMValCore to keep track of provenance (e.g. which input files @@ -233,20 +237,25 @@ Arbitrarily named other items are also supported. Please see the (installed version of the) file `esmvaltool/config-references.yml `_ for all available information on each item, see :ref:`esmvalcore:config-ref` for -an introduction. -In this file, the information is written in the form of ``key: value``. -Note that we add the keys to the diagnostics. 
-The keys will automatically be replaced by their values in the final provenance records. -For example, in the ``config-references.yml`` there is a category for types of the plots: +an introduction. It is also possible to add custom provenance information by adding items to each category in this file. +In this file, the information is written in the form + +.. code-block:: console + + key: + value: description + +for example .. code-block:: console plot_types: errorbar: error bar plot -In the diagnostics, we add the key as: -:code:`plot_types: [errorbar]` -It is also possible to add custom provenance information by adding items to each category in this file. +To use these items, include them in the provenance record dictionary in the form +:code:`key: [value]` +i.e. for the example above as +:code:`'plot_types': ['errorbar']`. In order to communicate with the diagnostic script, two interfaces have been defined, which are described in the `ESMValCore documentation `_. @@ -258,7 +267,7 @@ see the instructions and examples below on how to add provenance information: Recording provenance in a Python diagnostic script -------------------------------------------------- -Always use :meth:`esmvaltool.diag_scripts.shared.run_diagnostic` at the end of your script: +Always use :func:`esmvaltool.diag_scripts.shared.run_diagnostic` at the end of your script: .. code-block:: python @@ -266,16 +275,9 @@ Always use :meth:`esmvaltool.diag_scripts.shared.run_diagnostic` at the end of y with run_diagnostic() as config: main(config) -And make use of a :class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance: - -.. code-block:: python - - with ProvenanceLogger(cfg) as provenance_logger: - provenance_logger.log(diagnostic_file, provenance_record) - -The ``diagnostic_file`` can be obtained using :class:`esmvaltool.diag_scripts.shared.get_diagnostic_filename`. 
- -The ``provenance_record`` is a dictionary of provenance items, for example: +Create a ``provenance_record`` for each diagnostic file (i.e. image or data +file) that the diagnostic script outputs. The ``provenance_record`` is a +dictionary of provenance items, for example: .. code-block:: python @@ -294,10 +296,25 @@ The ``provenance_record`` is a dictionary of provenance items, for example: 'statistics': ['mean'], } +To save a matplotlib figure, use the convenience function +:func:`esmvaltool.diag_scripts.shared.save_figure`. Similarly, to save Iris cubes use +:func:`esmvaltool.diag_scripts.shared.save_data`. Both of these functions take +``provenance_record`` as an argument and log the provenance accordingly. Have a look at the example Python diagnostic in `esmvaltool/diag_scripts/examples/diagnostic.py `_ for a complete example. +For any other files created, you will need to make use of a +:class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance. Include the +following code directly after the file is saved: + +.. code-block:: python + + with ProvenanceLogger(cfg) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + +The full path of a ``diagnostic_file`` can be obtained using :func:`esmvaltool.diag_scripts.shared.get_diagnostic_filename`. 
+ Recording provenance in an NCL diagnostic script ------------------------------------------------ Always call the ``log_provenance`` procedure after plotting from your NCL diag_script: diff --git a/doc/sphinx/source/community/release_strategy/release_strategy.rst b/doc/sphinx/source/community/release_strategy/release_strategy.rst index d4662f38b3..9bd8d71e2d 100644 --- a/doc/sphinx/source/community/release_strategy/release_strategy.rst +++ b/doc/sphinx/source/community/release_strategy/release_strategy.rst @@ -54,18 +54,6 @@ With the following release schedule, we strive to have three releases per year a Upcoming releases ^^^^^^^^^^^^^^^^^ -- 2.9.0 (Release Manager: `Bouwe Andela`_) - -+------------+--------------------------+ -| 2023-06-05 |ESMValCore feature freeze | -+------------+--------------------------+ -| 2023-06-12 |ESMValCore release | -+------------+--------------------------+ -| 2023-06-19 |ESMValTool feature freeze | -+------------+--------------------------+ -| 2023-06-26 |ESMValTool release | -+------------+--------------------------+ - - 2.10.0 (Release Manager: `Klaus Zimmermann`_) +------------+--------------------------+ @@ -81,6 +69,28 @@ Upcoming releases Past releases ^^^^^^^^^^^^^ +- 2.9.0 (Release Manager: `Bouwe Andela`_) + ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+=============================================================================================+====================================+ +| 2023-06-05 | | ESMValCore Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2023-06-12 | 2023-07-04 | `ESMValCore Release 2.9.0 `_ | :ref:`esmvalcore:changelog-v2-9-0` | 
++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2023-06-19 | | ESMValTool Feature Freeze | | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| 2023-06-26 | 2023-07-06 | `ESMValTool Release 2.9.0 `_ | :ref:`changelog-v2-9-0` | ++------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ + +- 2.8.1 (Bugfix, Release Manager: `Valeriu Predoi`_) + ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ +| Done | Event | Changelog | ++============+=============================================================================================+====================================+ +| 2023-06-02 | `ESMValCore Release 2.8.1 `_ | :ref:`esmvalcore:changelog-v2-8-1` | ++------------+---------------------------------------------------------------------------------------------+------------------------------------+ + - 2.8.0 (Release Manager: `Rémi Kazeroni`_) +------------+------------+---------------------------------------------------------------------------------------------+------------------------------------+ @@ -454,7 +464,37 @@ and create the new release from the release branch (i.e. not from ``main``). The release tag always starts with the letter ``v`` followed by the version number, e.g. ``v2.1.0``. -6. Create and upload the PyPI package +6. Mark the release in the main branch +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +When the (pre-)release is tagged, it is time to merge the release branch back into `main`. 
+We do this for two reasons, namely, one, to mark the point up to which commits in `main` +have been considered for inclusion into the present release, and, two, to inform +setuptools-scm about the version number so that it creates the correct version number in +`main`. +However, unlike in a normal merge, we do not want to integrate any of the changes from the +release branch into main. +This is because all changes that should be in both branches, i.e. bug fixes, originate from +`main` anyway and the only other changes in the release branch relate to the release itself. +To take this into account, we perform the merge in this case on the command line using `the +ours merge strategy `__ +(``git merge -s ours``), not to be confused with the ``ours`` option to the ort merge strategy +(``git merge -X ours``). +For details about merge strategies, see the above-linked page. +To execute the merge use the following sequence of steps + +.. code-block:: bash + + git fetch + git checkout main + git pull + git merge -s ours v2.1.x + git push + +Note that the release branch remains intact and you should continue any work on the release +on that branch. + +7. Create and upload the PyPI package ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ The package is automatically uploaded to the @@ -485,7 +525,7 @@ Follow these steps to create a new Python package: You can read more about this in `Packaging Python Projects `__. -7. Create the Conda package +8. Create the Conda package ~~~~~~~~~~~~~~~~~~~~~~~~~~~ The ``esmvaltool`` package is published on the `conda-forge conda channel @@ -506,7 +546,7 @@ they will merge the pull request, which will in turn publish the package on conda-forge some time later. Contact the feedstock maintainers if you want to become a maintainer yourself. -8. Check the Docker images +9. 
Check the Docker images ~~~~~~~~~~~~~~~~~~~~~~~~~~ There are three main Docker container images available for ESMValTool on diff --git a/doc/sphinx/source/community/review.rst b/doc/sphinx/source/community/review.rst index 41e596dda9..3429c0ef7a 100644 --- a/doc/sphinx/source/community/review.rst +++ b/doc/sphinx/source/community/review.rst @@ -69,7 +69,7 @@ GitHub. Pull requests are merged by the `@ESMValGroup/esmvaltool-coreteam`_. Specifically, pull requests containing a :ref:`CMORizer script` can only be merged by -`@remi-kazeroni`_, who will then add the CMORized data to the OBS data pool at +a member of `@OBS-maintainers`_, who will then add the CMORized data to the OBS data pool at DKRZ and CEDA-Jasmin. The team member who does the merge first checks that both the technical and scientific reviewer approved the pull request and that the reviews were @@ -215,7 +215,7 @@ their opinion and try to find a solution. .. _`@ESMValGroup/tech-reviewers`: https://github.com/orgs/ESMValGroup/teams/tech-reviewers .. _`@ESMValGroup/science-reviewers`: https://github.com/orgs/ESMValGroup/teams/science-reviewers .. _`@ESMValGroup/esmvaltool-coreteam`: https://github.com/orgs/ESMValGroup/teams/esmvaltool-coreteam -.. _`@remi-kazeroni`: https://github.com/remi-kazeroni +.. _`@OBS-maintainers`: https://github.com/orgs/ESMValGroup/teams/obs-maintainers .. _`pull request template`: https://raw.githubusercontent.com/ESMValGroup/ESMValTool/main/.github/pull_request_template.md .. _`Google meet`: https://meet.google.com .. 
_`Jitsi meet`: https://meet.jit.si diff --git a/doc/sphinx/source/conf.py b/doc/sphinx/source/conf.py index 4606f1e359..5ef08c3b06 100644 --- a/doc/sphinx/source/conf.py +++ b/doc/sphinx/source/conf.py @@ -71,8 +71,6 @@ 'sphinx.ext.viewcode', 'sphinx.ext.napoleon', 'autodocsumm', - # github.com/readthedocs/sphinx_rtd_theme/issues/1451 - 'sphinxcontrib.jquery', ] autodoc_default_options = { @@ -155,7 +153,16 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -# html_theme_options = {} +# +# Avoid the following warning issued by pydata_sphinx_theme: +# +# "WARNING: The default value for `navigation_with_keys` will change to `False` +# in the next release. If you wish to preserve the old behavior for your site, +# set `navigation_with_keys=True` in the `html_theme_options` dict in your +# `conf.py` file.Be aware that `navigation_with_keys = True` has negative +# accessibility implications: +# https://github.com/pydata/pydata-sphinx-theme/issues/1492" +html_theme_options = {"navigation_with_keys": False} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] diff --git a/doc/sphinx/source/develop/recipe.rst b/doc/sphinx/source/develop/recipe.rst index 4404428a6c..98f802505d 100644 --- a/doc/sphinx/source/develop/recipe.rst +++ b/doc/sphinx/source/develop/recipe.rst @@ -17,6 +17,6 @@ the specific parameters needed by a recipe that runs a personal diagnostic are: i.e. the full path to the personal diagnostic that the user needs to run. There is also a lesson available in the -`ESMValTool tutorial `_ +`ESMValTool tutorial `_ that describes in a step-by-step procedure how to write your own recipe. It can be found -`here `_. +`here `_. 
diff --git a/doc/sphinx/source/faq.rst b/doc/sphinx/source/faq.rst index 15d69192ca..10c72bd2cb 100644 --- a/doc/sphinx/source/faq.rst +++ b/doc/sphinx/source/faq.rst @@ -113,9 +113,15 @@ a symbolic link to it so it gets picked up at every re-run iteration: Can ESMValTool plot arbitrary model output? =========================================== -Recipe :ref:`recipe_monitor` allows for the plotting of any preprocessed model. -The plotting parameters are set through a yaml configuration file, and the -type of plots to be generated are determined in the recipe. +:ref:`recipe_model_evaluation` provides a set of recipes that can be used for a +basic climate model evaluation with observational data. +This is especially useful to get an overview of the general performance of a +simulation. + +Furthermore, recipe :ref:`recipe_monitor` allows for the plotting of any +preprocessed model. +The plotting parameters are set through a yaml configuration file, and the type +of plots to be generated is determined in the recipe. 
Moreover, recipe :ref:`recipes_psyplot_diag` and the corresponding diagnostic :ref:`psyplot_diag.py ` provide a diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index b40e9f9846..6716d6ec55 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -355,6 +355,9 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MAC-LWP | lwp, lwpStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MERRA | cli, clivi, clt, clw, clwvi, hur, hus, lwp, pr, prw, ps, psl, rlut, rlutcs, rsdt, rsut, rsutcs, ta, | 3 | NCL | +| | tas, ts, ua, va, wap, zg (Amon) | | | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MERRA2 | sm (Lmon) | 3 | Python | | | clt, pr, evspsbl, hfss, hfls, huss, prc, prsn, prw, ps, psl, rlds, rldscs, rlus, rlut, rlutcs, rsds, | | | | | rsdscs, rsdt, tas, tasmin, tasmax, tauu, tauv, ts, uas, vas, rsus, rsuscs, rsut, rsutcs, ta, ua, va, | | | @@ -364,6 +367,8 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MOBO-DIC_MPIM | dissic (Omon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| MOBO-DIC2004-2019 | dissic (Omon) | 2 | Python | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MODIS | cliwi, clt, clwvi, iwpStderr, lwpStderr (Amon), od550aer (aero) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | MSWEP [#note1]_ | pr | 3 | n/a | @@ -380,11 +385,15 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NIWA-BS | toz, tozStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NOAA-CIRES-20CR | clt, clwvi, hus, prw, rlut, rsut (Amon) | 2 | Python | +| NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-CIRES-20CR-V3 | clt, clwvi, hus, prw, rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| NOAA-MBL-CH4 | ch4s (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NOAAGlobalTemp | tasa (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NSIDC-0116-[nh|sh] | usi, vsi (day) | 3 | Python | +| NSIDC-0116-[nh|sh] [#note4]_ | usi, vsi 
(day) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | OceanSODA-ETHZ | areacello (Ofx), co3os, dissicos, fgco2, phos, spco2, talkos (Omon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -424,6 +433,10 @@ A list of the datasets for which a CMORizers is available is provided in the fol can be found in the corresponding section of `recipe_check_obs.yml `__. +.. [#note4] The cmoriser requires PROJ>=9.3. Previous version of PROJ will return an error: + ``Internal Proj Error: proj_create: unhandled axis direction: UNKNOWN)`` + You can check the version of PROJ in your conda environment by running: + ``conda list PROJ``. .. _inputdata_native_datasets: diff --git a/doc/sphinx/source/quickstart/configuration.rst b/doc/sphinx/source/quickstart/configuration.rst index faee8dd2d8..34c29aac5c 100644 --- a/doc/sphinx/source/quickstart/configuration.rst +++ b/doc/sphinx/source/quickstart/configuration.rst @@ -23,6 +23,6 @@ the ``esmvaltool`` command can find the data on your system, before it can run a recipe. There is a lesson available in the -`ESMValTool tutorial `_ +`ESMValTool tutorial `_ that describes how to personalize the configuration file. It can be found -`at this site `_. +`at this site `_. diff --git a/doc/sphinx/source/quickstart/installation.rst b/doc/sphinx/source/quickstart/installation.rst index 5f91594ed3..a4f9f2a64c 100644 --- a/doc/sphinx/source/quickstart/installation.rst +++ b/doc/sphinx/source/quickstart/installation.rst @@ -37,10 +37,10 @@ The next sections will detail the procedure to install ESMValTool through each of these methods. There is also a lesson available in the -`ESMValTool tutorial `_ +`ESMValTool tutorial `_ that describes the installation of the ESMValTool in more detail. 
It can be found -`here `_. +`here `_. See `common installation issues`_ if you run into trouble. @@ -510,11 +510,11 @@ estate, so there is no need to install ESMValTool if you are just running recipe - Met Office: `esmvaltool` is available on the Linux estate after login and module loading via `module load`; see the ESMValTool Community of Practice SharePoint site for more details. -The ESMValTool Tutorial provides a `quickstart guide `__ +The ESMValTool Tutorial provides a `quickstart guide `__ that is particularly suited for new users that have an access to pre-installed version of ESMValTool. Information on how to request an account at CEDA-JASMIN and DKRZ-Levante and to get started with these HPC clusters -can be found on the setup page of the tutorial `here `__. +can be found on the setup page of the tutorial `here `__. .. _install_with_docker: diff --git a/doc/sphinx/source/quickstart/running.rst b/doc/sphinx/source/quickstart/running.rst index 8df3f99635..7f9cadbaa1 100644 --- a/doc/sphinx/source/quickstart/running.rst +++ b/doc/sphinx/source/quickstart/running.rst @@ -14,9 +14,9 @@ Running your first recipe ========================= There is a step-by-step tutorial available in the -`ESMValTool tutorial `_ +`ESMValTool tutorial `_ on how to run your first recipe. It can be found -`here `_. +`here `_. 
An `example recipe `_ diff --git a/doc/sphinx/source/recipes/broken_recipe_list.rst b/doc/sphinx/source/recipes/broken_recipe_list.rst index 7da6f41437..18471b2382 100644 --- a/doc/sphinx/source/recipes/broken_recipe_list.rst +++ b/doc/sphinx/source/recipes/broken_recipe_list.rst @@ -15,12 +15,12 @@ More details can be found in the :ref:`broken recipe policy * - Broken recipe - Affected diagnostics - Problem - - GitHub issue - * - :ref:`recipe_autoassess_landsurface_soilmoisture.yml ` - - All - - Dependency on some external climatology files - - `#2309 `_ + - GitHub issue * - `recipe_check_obs.yml` - `ERA5_native6` - Derivation of custom variables `rlus` and `rsus` - `#1388 `_ + * - :ref:`recipe_seaice_drift.yml ` + - `sea_ice_drift_SCICEX` + - ``shapely`` issue + - `#3243 `_ diff --git a/doc/sphinx/source/recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png b/doc/sphinx/source/recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png new file mode 100644 index 0000000000..67bccb204e Binary files /dev/null and b/doc/sphinx/source/recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..18aa689123 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..ae8afa7d25 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..6562693268 
Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..21076317ff Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..e241ae07e3 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png b/doc/sphinx/source/recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png new file mode 100644 index 0000000000..073ccff158 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg new file mode 100644 index 0000000000..0e1e8a4531 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg new file mode 100644 index 0000000000..f6abf01516 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg 
b/doc/sphinx/source/recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg new file mode 100644 index 0000000000..50b5ebbd20 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg new file mode 100644 index 0000000000..2b65fe97e7 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg b/doc/sphinx/source/recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg new file mode 100644 index 0000000000..4e252d7904 Binary files /dev/null and b/doc/sphinx/source/recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg differ diff --git a/doc/sphinx/source/recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png new file mode 100644 index 0000000000..4abd6df04f Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png new file mode 100755 index 0000000000..734913c60b Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png differ diff --git a/doc/sphinx/source/recipes/figures/monitor/variable_vs_lat_with_ref.png b/doc/sphinx/source/recipes/figures/monitor/variable_vs_lat_with_ref.png new file mode 100644 index 0000000000..31cb81135d Binary files /dev/null and b/doc/sphinx/source/recipes/figures/monitor/variable_vs_lat_with_ref.png differ diff --git a/doc/sphinx/source/recipes/index.rst 
b/doc/sphinx/source/recipes/index.rst index 57c77e8b30..edcc48977a 100644 --- a/doc/sphinx/source/recipes/index.rst +++ b/doc/sphinx/source/recipes/index.rst @@ -21,6 +21,7 @@ large variety of input data. .. toctree:: :maxdepth: 1 + recipe_model_evaluation recipe_monitor recipe_psyplot recipe_seaborn @@ -45,6 +46,7 @@ Atmosphere recipe_gier20bg recipe_heatwaves_coldwaves recipe_hyint + recipe_iht_toa recipe_impact recipe_modes_of_variability recipe_mpqb_xch4 @@ -99,6 +101,7 @@ IPCC recipe_ipccwg1ar6ch3 recipe_ipccwg1ar5ch9 recipe_collins13ipcc + recipe_examples Land ^^^^ diff --git a/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst index 9734498df8..5ba790b093 100644 --- a/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst +++ b/doc/sphinx/source/recipes/recipe_autoassess_landsurface_soilmoisture.rst @@ -17,6 +17,7 @@ Performance metrics: Metrics are calculated using model and observation multi-year climatologies (seasonal means) for meteorological seasons: + * December-January-February (djf) * March-April-May (mam) * June-July-August (jja) @@ -38,7 +39,6 @@ Recipes are stored in esmvaltool/recipes/ Diagnostics are stored in esmvaltool/diag_scripts/autoassess/ - * autoassess_area_base.py: wrapper for autoassess scripts * land_surface_soilmoisture/soilmoisture.py: script to calculate soil moisture metrics * plot_autoassess_metrics.py: plot normalised assessment metrics @@ -47,21 +47,17 @@ Diagnostics are stored in esmvaltool/diag_scripts/autoassess/ User settings in recipe ----------------------- -#. Script autoassess_area_base.py +#. 
Script soilmoisture.py *Required settings for script* * area: must equal land_surface_soilmoisture to select this diagnostic * control_model: name of model to be used as control * exp_model: name of model to be used as experiment - * start: date (YYYY/MM/DD) at which period begins (see note on time gating) - * end: date (YYYY/MM/DD) at which period ends (see note on time gating) - * climfiles_root: path to observation climatologies *Optional settings for script* - * title: arbitrary string with name of diagnostic - * obs_models: unused for this recipe + none *Required settings for variables* @@ -97,7 +93,8 @@ User settings in recipe Variables --------- -* mrsos (land, monthly mean, longitude latitude time) +* mrsos (from models: land, monthly mean, longitude latitude time) +* sm (from observations: land, monthly mean, longitude latitude time) Observations and reformat scripts @@ -122,56 +119,3 @@ Example plots :alt: Soilmoisture_Metrics.png Normalised metrics plot comparing a control and experiment simulation - - -Additional notes on usage -------------------------- -The ``landsurface_soilmoisture`` area metric is part of the ``esmvaltool/diag_scripts/autoassess`` diagnostics, -and, as any other ``autoassess`` metric, it uses the ``autoassess_area_base.py`` as general purpose -wrapper. This wrapper accepts a number of input arguments that are read through from the recipe. - -This recipe is part of the larger group of Autoassess metrics ported to ESMValTool -from the native Autoassess package from the UK's Met Office. The ``diagnostics`` settings -are almost the same as for the other Autoassess metrics. - -**Currently this recipe is marked as broken, because it only runs on Jasmin due to a dependency on some -external climatology files.** - -.. 
note:: - - **Time gating for autoassess metrics.** - - To preserve the native Autoassess functionalities, - data loading and selection on time is done somewhat - differently for ESMValTool's autoassess metrics: the - time selection is done in the preprocessor as per usual but - a further time selection is performed as part of the diagnostic. - For this purpose the user will specify a ``start:`` and ``end:`` - pair of arguments of ``scripts: autoassess_script`` (see below - for example). These are formatted as ``YYYY/MM/DD``; this is - necessary since the Autoassess metrics are computed from 1-Dec - through 1-Dec rather than 1-Jan through 1-Jan. This is a temporary - implementation to fully replicate the native Autoassess functionality - and a minor user inconvenience since they need to set an extra set of - ``start`` and ``end`` arguments in the diagnostic; this will be phased - when all the native Autoassess metrics have been ported to ESMValTool - review has completed. - - -An example of standard inputs as read by ``autoassess_area_base.py`` and passed -over to the diagnostic/metric is listed below. - - -.. code-block:: yaml - - scripts: - autoassess_landsurf_soilmoisture: &autoassess_landsurf_soilmoisture_settings - script: autoassess/autoassess_area_base.py - title: "Autoassess Land-Surface Soilmoisture Diagnostic" - area: land_surface_soilmoisture - control_model: IPSL-CM5A-LR - exp_model: inmcm4 - obs_models: [] - start: 1997/12/01 - end: 2002/12/01 - climfiles_root: '/gws/nopw/j04/esmeval/autoassess_specific_files/files' # on JASMIN diff --git a/doc/sphinx/source/recipes/recipe_examples.rst b/doc/sphinx/source/recipes/recipe_examples.rst index 5874e2c1c4..e3c32c4337 100644 --- a/doc/sphinx/source/recipes/recipe_examples.rst +++ b/doc/sphinx/source/recipes/recipe_examples.rst @@ -8,20 +8,26 @@ Overview These are example recipes calling example diagnostic scripts. 
-The recipe examples/recipe_python.yml produces time series plots of global mean +The recipe ``examples/recipe_python.yml`` produces time series plots of global mean temperature and for the temperature in Amsterdam. It also produces a map of global temperature in January 2020. -The recipe examples/recipe_extract_shape.yml produces a map of the mean +The recipe ``examples/recipe_easy_ipcc.yml`` reproduces part of figure 9.3a from +`IPCC AR6 - Climate Change 2021: The Physical Science Basis `__. +It demonstrates how ESMValTool can be used to conveniently analyze +many models on their native grid and is described in detail in the blog post +`Analysis-ready climate data with ESMValCore `__. + +The recipe ``examples/recipe_extract_shape.yml`` produces a map of the mean temperature in the Elbe catchment over the years 2000 to 2002. Some example shapefiles for use with this recipe are available `here `__, make sure to download all files with the same name but different extensions. -The recipe examples/recipe_julia.yml produces a map plot with the mean temperature +The recipe ``examples/recipe_julia.yml`` produces a map plot with the mean temperature over the year 1997 plus a number that is configurable from the recipe. -The recipe examples/recipe_decadal.yml showcases how the ``timerange`` tag +The recipe ``examples/recipe_decadal.yml`` showcases how the ``timerange`` tag can be used to load datasets belonging to the DCPP activity. Produces timeseries plots comparing the global mean temperature of a DCPP dataset with an observational dataset. @@ -29,16 +35,16 @@ dataset. 
Available recipes and diagnostics --------------------------------- -Recipes are stored in esmvaltool/recipes/ - +Recipes are stored in `esmvaltool/recipes/ `__: * examples/recipe_python.yml + * examples/recipe_easy_ipcc.yml * examples/recipe_extract_shape.yml * examples/recipe_julia.yml * examples/recipe_decadal.yml -Diagnostics are stored in esmvaltool/diag_scripts/ - +Diagnostics are stored in `esmvaltool/diag_scripts/ `__: * examples/diagnostic.py: visualize results and store provenance information + * examples/make_plot.py: Create a timeseries plot with likely ranges * examples/diagnostic.jl: visualize results and store provenance information * examples/decadal_example.py: visualize results and store provenance information @@ -63,6 +69,7 @@ Variables --------- * tas (atmos, monthly, longitude, latitude, time) +* tos (ocean, monthly, longitude, latitude, time) Example plots ------------- @@ -79,6 +86,12 @@ Example plots Amsterdam air temperature (multimodel mean of CMIP5 CanESM2 and CMIP6 BCC-ESM1). +.. _easy_ipcc: +.. figure:: /recipes/figures/examples/IPCC_AR6_figure_9.3a_1850-2100.png + :align: center + + Mean sea surface temperature anomaly (part of figure 9.3a from IPCC AR6). + .. _elbe: .. figure:: /recipes/figures/examples/elbe.png :align: center diff --git a/doc/sphinx/source/recipes/recipe_iht_toa.rst b/doc/sphinx/source/recipes/recipe_iht_toa.rst new file mode 100644 index 0000000000..87e182ef6a --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_iht_toa.rst @@ -0,0 +1,123 @@ +.. _recipes_iht_toa: + +Implied heat transport from Top of Atmosphere fluxes +==================================================== + +Overview +-------- + +This recipe calculates the implied horizontal heat transport (IHT) due to the +spatial anomalies of radiative fluxes at the top of the atmosphere (TOA). 
+The regional patterns of implied heat transport for different components of +the TOA fluxes are calculated by solving the Poisson equation with the flux +components as source terms. +It reproduces the plots in `Pearce and Bodas-Salcedo (2023)`_ when the input +data is CERES EBAF. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in esmvaltool/recipes/ + +* recipe_iht_toa.yml calculates the IHT maps for the following radiative fluxes: + + * Total net, SW net, LW net (Figure 2). + * Total CRE, SW CRE, LW CRE (Figure 4). + * All-sky and clear-sky reflected SW (Figure 5). + * The meridional heat transports (MHT) of the fluxes above (Figures 1 and 3). + +Diagnostics are stored in esmvaltool/diag_scripts/iht_toa/ + +* single_model_diagnostics.py: driver script that produces the plots. +* poisson_solver.py: solver that calculates the IHTs. + +.. _`Pearce and Bodas-Salcedo (2023)`: https://doi.org/10.1175/JCLI-D-22-0149.1 + +User settings in recipe +----------------------- +There are no user settings in this recipe. + +Variables +--------- + +* rlut (atmos, monthly, longitude latitude time) +* rlutcs (atmos, monthly, longitude latitude time) +* rsutcs (atmos, monthly, longitude latitude time) +* rsut (atmos, monthly, longitude latitude time) +* rsdt (atmos, monthly, longitude latitude time) + +Observations and reformat scripts +--------------------------------- + +* CERES-EBAF + +References +---------- + +* Pearce, F. A., and A. Bodas-Salcedo, 2023: Implied Heat Transport from CERES + Data: Direct Radiative Effect of Clouds on Regional Patterns and Hemispheric + Symmetry. J. Climate, 36, 4019–4030, doi: 10.1175/JCLI-D-22-0149.1. + +Example plots +------------- + +.. _fig_iht_toa_1: +.. figure:: /recipes/figures/iht_toa/figure1_CERES-EBAF_CERES-EBAF.png + :align: center + + The implied heat transport due to the total net flux (blue), split into + the contributions from the SW (orange) and LW (green). + +.. _fig_iht_toa_2: +.. 
figure:: /recipes/figures/iht_toa/figure2_CERES-EBAF_CERES-EBAF.png + :align: center + + The energy flux potentials for (a) TOT, (c) SW, and (e) LW fluxes, + alongside maps of the spatial anomalies of the fluxes [(b),(d),(f) + flux minus global average flux, respectively]. + The implied heat transport is calculated as the gradient of the energy + flux potential, shown by the white vector arrows for a subset of points + to give the overall transport pattern. + Heat is directed from the blue minima of the potential field to + yellow maxima, with the magnitude implied by the density of contours. + All maps of the same type share the same color bar at the bottom + of the column. + +.. _fig_iht_toa_3: +.. figure:: /recipes/figures/iht_toa/figure3_CERES-EBAF_CERES-EBAF.png + :align: center + + Direct radiative effects of clouds on the meridional heat transport. + (a) Contributions from TOT CRE (blue), SW CRE (orange), and LW CRE (green) + fluxes. (b) Contributions from all-sky and clear-sky OSR. + In (b), both curves have been multiplied by −1 such that positive heat + transport is northward. + +.. _fig_iht_toa_4: +.. figure:: /recipes/figures/iht_toa/figure4_CERES-EBAF_CERES-EBAF.png + :align: center + + As in :numref:`fig_iht_toa_2`, but for the implied heat transport associated with + (a),(b) TOT CRE, (c),(d) SW CRE, and (e),(f) LW CRE fluxes. + +.. _fig_iht_toa_5: +.. figure:: /recipes/figures/iht_toa/figure5_CERES-EBAF_CERES-EBAF.png + :align: center + + As in :numref:`fig_iht_toa_2`, but for (a), (b) clear-sky and (c), (d) all-sky reflected + SW flux. + +.. _fig_iht_toa_6: +.. figure:: /recipes/figures/iht_toa/figure6_CERES-EBAF_CERES-EBAF.png + :align: center + + A measure of the symmetry between heat transport in the Northern and + Southern Hemispheres, calculated for the 12-month running mean of TOT MHT + in the regions: (a) the full hemisphere, (b) from the equator to 30°, and + (c) 30° to 90°. 
+ Symmetry values obtained when including (blue) and excluding (orange) + the effect of clouds. The climatological symmetry values for the two cases + are shown as the black lines in each subplot, dashed and dotted, + respectively. + The standard deviations of the time series are shown in each plot. diff --git a/doc/sphinx/source/recipes/recipe_model_evaluation.rst b/doc/sphinx/source/recipes/recipe_model_evaluation.rst new file mode 100644 index 0000000000..9e199815e0 --- /dev/null +++ b/doc/sphinx/source/recipes/recipe_model_evaluation.rst @@ -0,0 +1,98 @@ +.. _recipe_model_evaluation: + +General model evaluation +======================== + +Overview +-------- + +These recipes and diagnostics provide a basic climate model evaluation with +observational data. +This is especially useful to get an overview of the performance of a +simulation. +The diagnostics used here allow plotting arbitrary preprocessor output, i.e., +arbitrary variables from arbitrary datasets. + + +Available recipes and diagnostics +--------------------------------- + +Recipes are stored in `recipes/model_evaluation` + +* recipe_model_evaluation_basics.yml +* recipe_model_evaluation_clouds_clim.yml +* recipe_model_evaluation_clouds_cycles.yml +* recipe_model_evaluation_precip_zonal.yml + +Diagnostics are stored in `diag_scripts/monitor/` + +* :ref:`multi_datasets.py + `: + Monitoring diagnostic to show multiple datasets in one plot (incl. biases). + + +User settings +------------- + +It is recommended to use a vector graphic file type (e.g., SVG) for the output +format when running this recipe, i.e., run the recipe with the command line +option ``--output_file_type=svg`` or use ``output_file_type: svg`` in your +:ref:`esmvalcore:user configuration file`. +Note that map and profile plots are rasterized by default. +Use ``rasterize: false`` in the recipe to disable +this. 
+ + +Recipe settings +~~~~~~~~~~~~~~~ + +A list of all possible configuration options that can be specified in the +recipe is given for each diagnostic individually (see links given for the +available diagnostics in the previous section). + + +Variables +--------- + +Any, but the variables' number of dimensions should match the ones expected by +each diagnostic (see links given for the available diagnostics in the previous +section). + + +Example plots +------------- + +.. _fig_1: +.. figure:: /recipes/figures/model_evaluation/map_tas_MPI-ESM1-2-HR_Amon.jpg + :align: center + :width: 14cm + +Global climatology of 2m near-surface air temperature. + +.. _fig_2: +.. figure:: /recipes/figures/model_evaluation/map_swcre_MPI-ESM1-2-HR_Amon.jpg + :align: center + :width: 14cm + +Global climatology of the shortwave cloud radiative effect (SWCRE). + +.. _fig_3: +.. figure:: /recipes/figures/model_evaluation/timeseries_rtnt_ambiguous_dataset_Amon.jpg + :align: center + :width: 14cm + +Time series of the global mean top-of-the-atmosphere net radiative flux. + +.. _fig_4: +.. figure:: /recipes/figures/model_evaluation/variable_vs_lat_pr_Amon.jpg + :align: center + :width: 14cm + +Zonal mean precipitation. + +.. _fig_5: +.. figure:: /recipes/figures/model_evaluation/annual_cycle_clt_southerocean_Amon.jpg + :align: center + :width: 14cm + +Annual cycle of Southern Ocean total cloud cover. diff --git a/doc/sphinx/source/recipes/recipe_monitor.rst b/doc/sphinx/source/recipes/recipe_monitor.rst index 0358ec36a7..ee3b9b44fa 100644 --- a/doc/sphinx/source/recipes/recipe_monitor.rst +++ b/doc/sphinx/source/recipes/recipe_monitor.rst @@ -18,19 +18,18 @@ Available recipes and diagnostics Recipes are stored in `recipes/monitor` - * recipe_monitor.yml - * recipe_monitor_with_refs.yml +* recipe_monitor.yml +* recipe_monitor_with_refs.yml Diagnostics are stored in `diag_scripts/monitor/` - * :ref:`monitor.py `: - Monitoring diagnostic to plot arbitrary preprocessor output. 
- * :ref:`compute_eofs.py `: - Monitoring diagnostic to plot EOF maps and associated PC timeseries. - * :ref:`multi_datasets.py - `: - Monitoring diagnostic to show multiple datasets in one plot (incl. - biases). +* :ref:`monitor.py `: + Monitoring diagnostic to plot arbitrary preprocessor output. +* :ref:`compute_eofs.py `: + Monitoring diagnostic to plot EOF maps and associated PC timeseries. +* :ref:`multi_datasets.py + `: + Monitoring diagnostic to show multiple datasets in one plot (incl. biases). User settings @@ -210,3 +209,24 @@ Zonal mean profile of ta including a reference dataset. :width: 14cm 1D profile of ta including a reference dataset. + +.. _fig_variable_vs_lat_with_ref: +.. figure:: /recipes/figures/monitor/variable_vs_lat_with_ref.png + :align: center + :width: 14cm + +Zonal mean pr including a reference dataset. + +.. _fig_hovmoeller_z_vs_time_with_ref: +.. figure:: /recipes/figures/monitor/hovmoeller_z_vs_time_with_ref.png + :align: center + :width: 14cm + +Hovmoeller plot (pressure vs. time) of ta including a reference dataset. + +.. _fig_hovmoeller_time_vs_lat_with_ref: +.. figure:: /recipes/figures/monitor/hovmoeller_time_vs_lat_with_ref.png + :align: center + :width: 14cm + +Hovmoeller plot (time vs. latitude) of tas including a reference dataset diff --git a/doc/sphinx/source/utils.rst b/doc/sphinx/source/utils.rst index 5e46d06d81..e5e7b00553 100644 --- a/doc/sphinx/source/utils.rst +++ b/doc/sphinx/source/utils.rst @@ -246,7 +246,8 @@ Optionally, the following parameters can be edited: * ``partition``, *str*: Name of the DKRZ partition used to run jobs. Default is ``interactive`` to minimize computing cost compared to ``compute`` for which nodes cannot be shared. * ``memory``, *str*: Amount of memory requested for each run. Default is ``64G`` to allow to run 4 recipes on the same node in parallel. * ``time``, *str*: Time limit. Default is ``04:00:00`` to increase the job priority. 
Jobs can run for up to 8 hours and 12 hours on the compute and interactive partitions, respectively. - +* ``default_max_parallel_tasks``, *int*: Default is ``8`` which works for most recipes. For other cases, an entry needs to be made to the ``MAX_PARALLEL_TASKS`` dictionary (see below). + The script will generate a submission script for all recipes using by default the ``interactive`` queue and with a time limit of 4h. In case a recipe may require of additional resources, they can be defined in the ``SPECIAL_RECIPES`` dictionary. The recipe name has to be given as a ``key`` in which the values are another dictionary. @@ -263,8 +264,8 @@ given by the slurm flags ``--mem``, ``--constraint`` or ``--ntasks``. In general }, } -Some recipes can only be run with ``--max_parallel_tasks=1`` for various reasons (memory issues, diagnostic issues, CMIP3 data used). -These recipes need to be added to the ``ONE_TASK_RECIPES`` list. +Some recipes can only be run with a number of tasks less than ``default_max_parallel_tasks`` for various reasons (memory issues, diagnostic issues, CMIP3 data used). +These recipes need to be added to the ``MAX_PARALLEL_TASKS`` dictionary with a specific ``max_parallel_tasks`` value. Note that the script has been optimized to use standard SLURM settings to run most recipes while minimizing the computational cost of the jobs and tailored runtime settings for resource-intensive recipes. It is only necessary to edit this script for recipes that have been added since the last release and cannot be run with the default settings. 
diff --git a/environment.yml b/environment.yml index 7f060c5437..fb2e5adb14 100644 --- a/environment.yml +++ b/environment.yml @@ -24,7 +24,7 @@ dependencies: - ecmwf-api-client - eofs - esmpy - - esmvalcore 2.9.* + - esmvalcore =2.10.0rc1 - fiona - fire - gdal @@ -38,6 +38,7 @@ dependencies: - natsort - nc-time-axis - netCDF4 + - numba - numpy !=1.24.3 # severe masking bug - packaging - openpyxl diff --git a/environment_osx.yml b/environment_osx.yml index 64d3a142b9..0734dee2c6 100644 --- a/environment_osx.yml +++ b/environment_osx.yml @@ -24,7 +24,7 @@ dependencies: - ecmwf-api-client - eofs - esmpy - - esmvalcore 2.9.* + - esmvalcore =2.10.0rc1 - fiona - fire - gdal diff --git a/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC2004-2019.yml b/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC2004-2019.yml new file mode 100644 index 0000000000..7e80dc6634 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/MOBO-DIC2004-2019.yml @@ -0,0 +1,19 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: MOBO-DIC2004-2019 + version: '2.3' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/' + reference: 'mobo_dic2004_2019' + +# Variables to cmorize +variables: + dissic: + filename: MPI_MOBO-DIC_2004-2019_v2.nc + mip: Omon + raw_name: DIC + raw_units: '1e-6 mol kg-1' + comment: 'The original units of this variable are mumol/kg. To convert to the CMOR units mol/m3, we assume a constant sea water density of 1032 kg/m3, which is approximately the sea water density for T=4°C, salinity=35PSU, and p=100bar according to the UNESCO formula (UNESCO, 1981, Tenth report of the joint panel on oceanographic tables and standards, UNESCO Technical Papers in Marine Science, see https://www.wkcgroup.com/tools-room/seawater-density-calculator/ and https://link.springer.com/content/pdf/bbm:978-3-319-18908-6/1.pdf).' 
diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml similarity index 88% rename from esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR.yml rename to esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml index cb3ccb38a8..7591e99257 100644 --- a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR.yml +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml @@ -1,13 +1,13 @@ --- # Global attributes of NetCDF file attributes: - dataset_id: NOAA-CIRES-20CR + dataset_id: NOAA-CIRES-20CR-V2 project_id: OBS6 tier: 2 version: 'v2' modeling_realm: reanaly source: 'https://psl.noaa.gov/data/gridded/data.20thC_ReanV2.html' - reference: 'noaa-cires-20cr' + reference: 'noaa-cires-20cr-v2' comment: | '' diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V3.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V3.yml new file mode 100644 index 0000000000..d16d5265e0 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V3.yml @@ -0,0 +1,56 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: NOAA-CIRES-20CR-V3 + project_id: OBS6 + tier: 2 + version: 'v3' + modeling_realm: reanaly + source: 'https://psl.noaa.gov/data/gridded/data.20thC_ReanV3.html' + reference: 'noaa-cires-20cr-v3' + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + clt_month: + short_name: clt + mip: Amon + raw: tcdc + file: 'tcdc.eatm.mon.mean.nc' + clwvi_month: + short_name: clwvi + mip: Amon + raw: cldwtr + file: 'cldwtr.eatm.mon.mean.nc' + prw_month: + short_name: prw + mip: Amon + raw: pr_wtr + file: 'pr_wtr.eatm.mon.mean.nc' + hus_month: + short_name: hus + mip: Amon + raw: shum + file: 'shum.mon.mean.nc' + rlut_month: + short_name: rlut + mip: Amon + raw: ulwrf + file: 'ulwrf.ntat.mon.mean.nc' + rsut_month: + short_name: rsut + mip: Amon + raw: uswrf + file: 'uswrf.ntat.mon.mean.nc' + rlutcs_month: + short_name: 
rlutcs + mip: Amon + raw: csulf + file: 'csulf.ntat.mon.mean.nc' + rsutcs_month: + short_name: rsutcs + mip: Amon + raw: csusf + file: 'csusf.ntat.mon.mean.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-MBL-CH4.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-MBL-CH4.yml new file mode 100644 index 0000000000..23e84a657d --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-MBL-CH4.yml @@ -0,0 +1,18 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: NOAA-MBL-CH4 + version: '1.0' + tier: 2 + modeling_realm: atmos + project_id: OBS6 + source: 'https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv' + reference: 'noaa-mbl-ch4' + +# Variables to cmorize +variables: + ch4s: + filename: ch4_mm_gl.csv + mip: Amon + raw_name: ch4 + raw_units: 'nmol mol-1' diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml index 1fa222d818..b29ab98412 100644 --- a/esmvaltool/cmorizers/data/datasets.yml +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -832,6 +832,13 @@ datasets: individually or by using the option "download all". Data is freely available, but a registration is required. + MERRA: + tier: 3 + source: https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/ + last_access: 2023-02-01 + info: | + Use automatic download. That will download monthly data. 
+ MERRA2: tier: 3 source: https://goldsmr4.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/ https://goldsmr5.gesdisc.eosdis.nasa.gov/data/MERRA2_MONTHLY/ @@ -862,6 +869,13 @@ datasets: info: | Download the file MOBO-DIC_MPIM_monthly_clim.nc + MOBO-DIC2004-2019: + tier: 2 + source: https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/ + last_access: 2023-10-09 + info: | + Download the file MPI_MOBO-DIC_2004-2019_v2.nc + MODIS: tier: 3 source: https://ladsweb.modaps.eosdis.nasa.gov/search/order @@ -981,7 +995,7 @@ datasets: https://nsidc.org/data/NSIDC-0116 Login required for download, and also requires citation only to use - NOAA-CIRES-20CR: + NOAA-CIRES-20CR-V2: tier: 2 source: ftp.cdc.noaa.gov/Projects/20thC_ReanV2/Monthlies/ last_access: 2022-11-17 @@ -994,6 +1008,21 @@ datasets: gaussian/monolevel/ulwrf.ntat.mon.mean.nc gaussian/monolevel/uswrf.ntat.mon.mean.nc + NOAA-CIRES-20CR-V3: + tier: 2 + source: ftp.cdc.noaa.gov/Projects/20thC_ReanV3/Monthlies/ + last_access: 2023-03-27 + info: | + Download the following files: + miscSI-MO/cldwtr.eatm.mon.mean.nc + miscSI-MO/pr_wtr.eatm.mon.mean.nc + prsSI-MO/shum.mon.mean.nc + miscMO/tcdc.eatm.mon.mean.nc + ntatFlxSI-MO/ulwrf.ntat.mon.mean.nc + ntatFlxSI-MO/uswrf.ntat.mon.mean.nc + ntatFlxSI-MO/csulf.ntat.mon.mean.nc + ntatFlxSI-MO/csusf.ntat.mon.mean.nc + NOAAGlobalTemp: tier: 2 source: https://www.ncei.noaa.gov/data/noaa-global-surface-temperature/v5/access/ @@ -1002,6 +1031,14 @@ datasets: Download the following files: [SOURCE]/gridded/NOAAGlobalTemp_v5.0.0_gridded_s188001_e202205_c20220608T133245.nc + NOAA-MBL-CH4: + tier: 2 + source: https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv + last_access: 2023-07-17 + info: | + Download the following file: + https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv + NSIDC-0116-sh: tier: 3 source: https://nsidc.org/data/NSIDC-0116 diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/merra.py b/esmvaltool/cmorizers/data/downloaders/datasets/merra.py 
new file mode 100644 index 0000000000..df1d0ff7e9 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/merra.py @@ -0,0 +1,57 @@ +"""Script to download MERRA.""" + +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import NASADownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + if not start_date: + start_date = datetime(1979, 1, 1) + if not end_date: + end_date = datetime(2015, 12, 31) + loop_date = start_date + + downloader = NASADownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + while loop_date <= end_date: + year = loop_date.year + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MAIMNXINT.5.2.0/{year}/") + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MAIMCPASM.5.2.0/{year}/") + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MATMNXRAD.5.2.0/{year}/") + downloader.download_folder( + "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + f"MATMFXCHM.5.2.0/{year}/") + + loop_date += relativedelta.relativedelta(years=1) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic2004_2019.py b/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic2004_2019.py new file mode 100644 index 0000000000..1299981811 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/mobo_dic2004_2019.py @@ -0,0 +1,39 @@ +"""Script to 
download MOBO-DIC2004-2019.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/" + "MPI_MOBO-DIC_2004-2019_v2.nc", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py similarity index 97% rename from esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr.py rename to esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py index 836cfe8391..fb2d733f06 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py @@ -1,4 +1,4 @@ -"""Script to download NOAA-CIRES-20CR.""" +"""Script to download NOAA-CIRES-20CR-V2.""" import logging from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v3.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v3.py new file mode 100644 index 0000000000..67f1a38f33 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v3.py @@ -0,0 +1,53 @@ +"""Script to download NOAA-CIRES-20CR-V3.""" +import logging + +from 
esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server='ftp.cdc.noaa.gov', + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + downloader.set_cwd("Datasets/20thC_ReanV3/Monthlies/") + downloader.download_file("miscSI-MO/cldwtr.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("miscSI-MO/pr_wtr.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("prsSI-MO/shum.mon.mean.nc", + sub_folder='pressure') + downloader.download_file("miscMO/tcdc.eatm.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/ulwrf.ntat.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/uswrf.ntat.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/csulf.ntat.mon.mean.nc", + sub_folder='surface') + downloader.download_file("ntatFlxSI-MO/csusf.ntat.mon.mean.nc", + sub_folder='surface') diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_mbl_ch4.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_mbl_ch4.py new file mode 100644 index 0000000000..3cbf701c97 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_mbl_ch4.py @@ -0,0 +1,38 @@ +"""Script to download NOAA-MBL-CH4.""" +import logging + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def 
download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. + + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + downloader.download_file( + "https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv", + wget_options=[], + ) diff --git a/esmvaltool/cmorizers/data/downloaders/wget.py b/esmvaltool/cmorizers/data/downloaders/wget.py index 413066c60b..2afcca1d5a 100644 --- a/esmvaltool/cmorizers/data/downloaders/wget.py +++ b/esmvaltool/cmorizers/data/downloaders/wget.py @@ -103,8 +103,8 @@ def download_folder(self, server_path, wget_options=None): """ if wget_options is None: wget_options = [] - wget_options = self._wget_common_options + ["-np", "--accept=nc,nc4" - ] + wget_options + wget_options = self._wget_common_options + [ + "-np", "--accept=nc,nc4,hdf"] + wget_options super().download_folder(server_path, wget_options) def download_file(self, server_path, wget_options=None): diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py index c5ec55bb16..0985e5d6e6 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py @@ -23,7 +23,7 @@ import os import iris -from esmvalcore.cmor.check import _get_time_bounds +from esmvalcore.cmor.fixes import get_time_bounds from esmvalcore.preprocessor import concatenate from ...utilities import ( @@ -83,7 +83,7 @@ def cmorization(in_dir, out_dir, cfg, cfg_user, 
start_date, end_date): yearly_cube = concatenate(monthly_cubes) # Fix monthly time bounds time = yearly_cube.coord('time') - time.bounds = _get_time_bounds(time, 'mon') + time.bounds = get_time_bounds(time, 'mon') save_variable(yearly_cube, var, out_dir, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl new file mode 100644 index 0000000000..b57bca6a09 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl @@ -0,0 +1,315 @@ +; ############################################################################# +; ESMValTool CMORizer for NASA MERRA reanalysis v5.2.0 +; ############################################################################# +; +; Tier +; Tier 3: restricted dataset (registration required). +; +; Source +; EarthData via https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/ +; +; Last access +; 20230201 +; +; Download and processing instructions +; (requires EarthData login; see https://urs.earthdata.nasa.gov/) +; Use ESMValTool automatic download: +; esmvaltool data download --config_file MERRA +; +; Modification history +; 20230818-lauer_axel: added output of clwvi (iwp + lwp) +; 20230201-lauer_axel: written +; +; ############################################################################# +loadscript(getenv("esmvaltool_root") + \ + "/data/formatters/interface.ncl") + +begin + + ; Script name (for logger) + DIAG_SCRIPT = "merra.ncl" + + ; Source name + OBSNAME = "MERRA" + + ; Tier + TIER = 3 + + ; Period (complete years only) + YEAR1 = get_year(start_year, 1979) + YEAR2 = get_year(end_year, 2015) + + ; Selected variable (standard name) + VAR = (/"zg", "wap", "cli", "clw", "hus", "hur", "ta", "ua", "va", \ + "ps", "psl", \ + "clivi", "clwvi", "lwp", "prw", \ + "clt", "rlut", "rlutcs", "rsdt", "rsut", "rsutcs", "ts", \ + "tas", "pr"/) + + ; Name in the raw data + NAME = (/"H", "OMEGA", "QI", "QL", "QV", "RH", "T", "U", "V", \ ; 3d asm + "PS", "SLP", 
\ + "TQI", "TQI", "TQL", "TQV", \ ; 2d int + "CLDTOT", "LWTUP", "LWTUPCLR", "SWTDN", "SWTNT", \ ; 2d rad + "SWTNTCLR", "TS", \ + "T2M", "PRECLSC"/) ; 2d chm + + ; unit conversion factor + CONVFAC = (/1.0, 1.0, 1.0, 1.0, 1.0, 100.0, 1.0, 1.0, 1.0, \ ; 3d asm + 1.0, 1.0, \ + 1.0, 1.0, 1.0, 1.0, \ ; 2d int + 100.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, \ ; 2d rad + 1.0, 1.0/) ; 2d chm + + ; additional attribute "positive" (for radiative fluxes) + POSITIVE = (/"", "", "", "", "", "", "", "", "", "", "", \ ; 3d asm + "", "", "", "", \ ; 2d int + "", "up", "up", "down", "up", "up", "", \ ; 2d rad + "", ""/) ; 2d chm + + ; names source files + SOURCEFILE = (/"instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ ; 3d asm + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", "instM_3d_asm_Cp.", \ + "instM_3d_asm_Cp.", \ + "instM_2d_int_Nx.", "instM_2d_int_Nx.", \ ; 2d int + "instM_2d_int_Nx.", "instM_2d_int_Nx.", \ + "tavgM_2d_rad_Nx.", "tavgM_2d_rad_Nx.", \ ; 2d rad + "tavgM_2d_rad_Nx.", "tavgM_2d_rad_Nx.", \ + "tavgM_2d_rad_Nx.", "tavgM_2d_rad_Nx.", \ + "tavgM_2d_rad_Nx.", \ + "tavgM_2d_chm_Fx.", "tavgM_2d_chm_Fx."/) ; 2d chm + + ; dataset doi numbers + DOI = (/"10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ ; 3d asm + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", "10.5067/YX0AVASQRTNW", \ + "10.5067/YX0AVASQRTNW", \ + "10.5067/QL0PGBK2CYJS", "10.5067/QL0PGBK2CYJS", \ ; 2d int + "10.5067/QL0PGBK2CYJS", "10.5067/QL0PGBK2CYJS", \ + "10.5067/6UX3EDUNVUFK", "10.5067/6UX3EDUNVUFK", \ ; 2d rad + "10.5067/6UX3EDUNVUFK", "10.5067/6UX3EDUNVUFK", \ + "10.5067/6UX3EDUNVUFK", "10.5067/6UX3EDUNVUFK", \ + "10.5067/6UX3EDUNVUFK", \ + "10.5067/IYDN3LNZ63UE", "10.5067/IYDN3LNZ63UE"/) ; 2d chm + + ; MIP + MIP = "Amon" + + ; Frequency + FREQ = "mon" + + ; CMOR table + CMOR_TABLE 
= getenv("cmor_tables") + \ + (/"/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ ; 3d asm + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ ; 2d int + "/custom/CMOR_lwp.dat", "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ ; 2d rad + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, \ + "/cmip5/Tables/CMIP5_" + MIP, "/cmip5/Tables/CMIP5_" + MIP/) ; 2d chm + + ; Type + TYPE = "reanaly" + + ; Version + VERSION = "5.2.0" + + ; Global attributes + SOURCE = "https://goldsmr3.gesdisc.eosdis.nasa.gov/data/MERRA_MONTHLY/" + COMMENT = "Goddard Earth Sciences Data and Information Services Center " + \ + "(GES DISC)" + +end + +begin + + ; Loop over variables + do vv = 0, dimsizes(VAR) - 1 + + log_info("Processing " + VAR(vv) + " (" + MIP + ")") + if (isvar("output")) then + delete(output) + end if + + do yy = YEAR1, YEAR2 + do mm = 1, 12 + + datestr = tostring(yy) + if (mm .lt. 10) then + datestr = datestr + "0" + end if + datestr = datestr + tostring(mm) + + fname = systemfunc("ls " + input_dir_path + "MERRA???.prod.assim." + \ + SOURCEFILE(vv) + datestr + ".hdf") + + f = addfile(fname, "r") + tmp = f->$NAME(vv)$ + + ; Extract time range + tmp&TIME_EOSGRID@calendar = "standard" + date = cd_calendar(tmp&TIME_EOSGRID, 0) + if ((date(0, 0) .ne. yy) .or. (date(0, 1) .ne. 
mm)) then + error_msg("f", DIAG_SCRIPT, "", \ + "date in input file does not match date in filename: " + \ + fname) + end if + + delete(date) + + if (.not.isvar("output")) then + dims = dimsizes(tmp) + ; overwrite time dimension + dims(0) = 12 + output = new(dims, float) + delete(dims) + rank = dimsizes(dimsizes(output)) + + output!0 = "time" + if (rank.eq.4) then + output!1 = "plev" + output!2 = "lat" + output!3 = "lon" + output&plev = tmp&Height_EOSGRID * 100. ; [hPa] --> [Pa] + elseif (rank.eq.3) + output!1 = "lat" + output!2 = "lon" + end if + + output&time = fspan(1, 12, 12) + output&time@calendar = "standard" + output&time@units = "days since 1950-01-01 00:00:00" + + output&lat = tmp&YDim_EOSGRID + output&lon = tmp&XDim_EOSGRID + + end if + + ; Unpack variable according to metadata information + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + + if (rank.eq.4) then + output(mm - 1, :, :, :) = (/ tmp(0, :, :, :) /) + else + output(mm - 1, :, :) = (/ tmp(0, :, :) /) + end if + + delete(tmp) + + ; calcuation of outgoing fluxes: out = in - net + if ((VAR(vv) .eq. "rsut") .or. (VAR(vv) .eq. "rsutcs")) then + tmp = f->SWTDN + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = (/ tmp(0, :, :) /) - output(mm - 1, :, :) + + delete(tmp) + end if + + ; calcuation of total precipitation flux = large-scale+convective+anvil + if (VAR(vv) .eq. "pr") then + tmp = f->PRECCON ; surface precipitation flux from convection + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = output(mm - 1, :, :) + tmp(0, :, :) + delete(tmp) + tmp = f->PRECANV ; surface precipitation flux from anvils + if (isatt(tmp, "scale_factor") .or. 
isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = output(mm - 1, :, :) + tmp(0, :, :) + delete(tmp) + end if + + ; calculation of clwvi + if (VAR(vv) .eq. "clwvi") then + tmp = f->TQL + if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then + tmp = tmp * tmp@scale_factor + tmp@add_offset + end if + output(mm - 1, :, :) = output(mm - 1, :, :) + tmp(0, :, :) + delete(tmp) + end if + + delete(f) + + end do ; loop over months (mm) + + ; Convert units + if (CONVFAC(vv) .ne. 1.0) then + output = output * CONVFAC(vv) + end if + + ; Format coordinates + format_coords(output, yy + "0101", yy + "1231", FREQ) + + ; Set variable attributes + tmp = format_variable(output, VAR(vv), CMOR_TABLE(vv)) + delete(output) + output = tmp + delete(tmp) + + ; set additional attribute "positive" for radiative fluxes + if (POSITIVE(vv) .ne. "") then + output@positive = POSITIVE(vv) + end if + + ; Calculate coordinate bounds + bounds = guess_coord_bounds(output, FREQ) + + ; Set global attributes + REF = "doi: " + DOI(vv) + gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) + + ; Output file + DATESTR = yy + "01-" + yy + "12" + fout = output_dir_path + \ + str_join((/"OBS", OBSNAME, TYPE, VERSION, \ + MIP, VAR(vv), DATESTR/), "_") + ".nc" + + ; Add height coordinate to tas variable (required by the new backend) + if (VAR(vv).eq."tas") then + output@coordinates = "height" + end if + + ; Write variable + write_nc(fout, VAR(vv), output, bounds, gAtt) + delete(gAtt) + delete(output) + delete(bounds) + + ; Add height coordinate to tas variable (required by CMOR checker) + if (VAR(vv).eq."tas") then + height = 2.d + height!0 = "ncl_scalar" + height@units = "m" + height@axis = "Z" + height@positive = "up" + height@long_name = "height" + height@standard_name = "height" + w = addfile(fout, "w") + w->height = height + delete(w) + end if + + ; --------------------------------------------------------------------- + + end 
do ; loop over years (yy) + end do ; loop over variables (vv) + +end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic2004_2019.py b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic2004_2019.py new file mode 100644 index 0000000000..570e20f715 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic2004_2019.py @@ -0,0 +1,17 @@ +"""ESMValTool CMORizer for MOBO-DIC2004-2019 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://www.nodc.noaa.gov/archive/arc0211/0277099/2.3/data/0-data/ + +Last access + 20231009 + +Download and processing instructions + Download the file MPI_MOBO-DIC_2004-2019_v2.nc + +""" + +from .mobo_dic_mpim import cmorization # noqa diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py index 68884c22ae..9ae096104f 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py @@ -20,6 +20,7 @@ from pathlib import Path import iris +import numpy as np from cf_units import Unit from dask import array as da from iris import NameConstraint @@ -30,6 +31,9 @@ logger = logging.getLogger(__name__) +TIME_UNITS = Unit('days since 1950-01-01 00:00:00', calendar='standard') + + def _callback_fix_missing_value(cube, field, _): """Create masked array from missing_value.""" if hasattr(field.cf_data, 'missing_value'): @@ -40,20 +44,18 @@ def _callback_fix_missing_value(cube, field, _): def _fix_climatological_time(cube): """Fix climatology coordinate.""" - time_units = Unit('days since 1950-01-01 00:00:00', calendar='standard') - # Following the doc the covered time period of the climatology is # January 2004 to December 2017 (Use 2011 as the "mean" year). 
See # https://www.ncei.noaa.gov/access/metadata/landing-page/bin/ # iso?id=gov.noaa.nodc%3A0221526 - time_points = time_units.date2num( + time_points = TIME_UNITS.date2num( [datetime(2011, m, 15) for m in range(1, 13)] ) time_bounds = [ [datetime(2004, m, 1), datetime(2017, m + 1, 1)] for m in range(1, 12) ] time_bounds.append([datetime(2004, 12, 1), datetime(2018, 1, 1)]) - time_bounds = time_units.date2num(time_bounds) + time_bounds = TIME_UNITS.date2num(time_bounds) # Add new time coordinate to cube time_coord = DimCoord( @@ -62,7 +64,7 @@ def _fix_climatological_time(cube): standard_name='time', long_name='time', var_name='time', - units=time_units, + units=TIME_UNITS, climatological=True, ) cube.remove_coord('month of the year') @@ -73,6 +75,49 @@ def _fix_climatological_time(cube): cube.add_cell_method(CellMethod('mean over years', coords=time_coord)) +def _fix_time(cube): + """Fix time coordinate.""" + julian_day_coord = cube.coord('Julian Day') + + # Calculate bounds of new time coordinate + # print(str(julian_day_coord.units)) + datetime_base = datetime.strptime( + str(julian_day_coord.units).partition(' since ')[2], + '%Y-%m-%d %H:%M:%S', + ) + base_year = datetime_base.year + base_month = datetime_base.month + all_months = list(julian_day_coord.points.astype(int)) + [ + julian_day_coord.points.astype(int).max() + 1 # 1 more month for bnds + ] + bounds_datetimes = [ + datetime(base_year + (m - 1) // 12, base_month + (m - 1) % 12, 1) + for m in all_months + ] + time_bounds = np.stack( + ( + TIME_UNITS.date2num(bounds_datetimes[:-1]), + TIME_UNITS.date2num(bounds_datetimes[1:]), + ), + axis=-1, + ) + + # Calculate time points as mean of bounds + time_points = np.mean(time_bounds, axis=1) + + # Add new time coordinate to cube + time_coord = DimCoord( + time_points, + bounds=time_bounds, + standard_name='time', + long_name='time', + var_name='time', + units=TIME_UNITS, + ) + cube.remove_coord('Julian Day') + cube.add_dim_coord(time_coord, 0) + + def 
_fix_var_metadata(var_info, cmor_info, cube): """Fix variable metadata. @@ -121,7 +166,10 @@ def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): _fix_var_metadata(var_info, cmor_info, cube) # Fix coordinates - _fix_climatological_time(cube) + if cube.coords('month of the year'): # MOBO-DIC_MPIM + _fix_climatological_time(cube) + elif cube.coords('Julian Day'): # MOBO-DIC2004-2019 + _fix_time(cube) cube.coord('depth').units = 'm' utils.fix_coords(cube, overwrite_time_bounds=False) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v2.py similarity index 92% rename from esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr.py rename to esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v2.py index cdc5efbd40..3744cdce67 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v2.py @@ -1,4 +1,4 @@ -"""ESMValTool CMORizer for NOAA-CIRES-20CR data. +"""ESMValTool CMORizer for NOAA-CIRES-20CR-V2 data. Tier Tier 2: other freely-available dataset. diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v3.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v3.py new file mode 100644 index 0000000000..9405473931 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_cires_20cr_v3.py @@ -0,0 +1,32 @@ +"""ESMValTool CMORizer for NOAA-CIRES-20CR-V3 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.ncep.reanalysis2.html + +Last access + 20230327 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. 
+ + ftp://ftp.cdc.noaa.gov/Datasets/20thC_ReanV3/Monthlies/ + + pr_wtr.eatm.mon.mean.nc + cldwtr.eatm.mon.mean.nc + tcdc.eatm.mon.mean.nc + ulwrf.ntat.mon.mean.nc + uswrf.ntat.mon.mean.nc + csulf.ntat.mon.mean.nc + csusf.ntat.mon.mean.nc + shum.mon.mean.nc + +Caveats + +""" +from .ncep_ncar_r1 import cmorization + +# The following line makes it clear that the above import is not an error +cmorization diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_mbl_ch4.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_mbl_ch4.py new file mode 100644 index 0000000000..30011229de --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_mbl_ch4.py @@ -0,0 +1,159 @@ +"""ESMValTool CMORizer for NOAA-MBL-CH4 data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://gml.noaa.gov/ccgg/trends_ch4/ + +Last access + 20230717 + +Download and processing instructions + Download the file: + wget https://gml.noaa.gov/webdata/ccgg/trends/ch4/ch4_mm_gl.csv +""" + +import logging +import warnings +from pathlib import Path + +from datetime import datetime +import iris +import pandas as pd +from cf_units import Unit +import numpy as np + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + +LAT_COORD = iris.coords.DimCoord([0.], + bounds=[[-90.0, 90.0]], + var_name='lat', + standard_name='latitude', + long_name='latitude', + units='degrees') +LON_COORD = iris.coords.DimCoord([180.0], + bounds=[[0., 360.]], + var_name='lon', + standard_name='longitude', + long_name='longitude', + units='degrees') + + +def _fix_var_metadata(var_info, cmor_info, cube): + """Fix variable metadata.""" + if 'raw_units' in var_info: + cube.units = var_info['raw_units'] + + cube.convert_units(cmor_info.units) + + utils.fix_var_metadata(cube, cmor_info) + return cube + + +def _get_time_coord(year, month): + """Get time coordinate.""" + point = datetime(year=year, month=month, day=15) + bound_low = datetime(year=year, 
month=month, day=1) + if month == 12: + month_bound_up = 1 + year_bound_up = year + 1 + else: + month_bound_up = month + 1 + year_bound_up = year + bound_up = datetime(year=year_bound_up, month=month_bound_up, day=1) + time_units = Unit('days since 1950-01-01 00:00:00', calendar='standard') + time_coord = iris.coords.DimCoord( + time_units.date2num(point), + bounds=time_units.date2num([bound_low, bound_up]), + var_name='time', + standard_name='time', + long_name='time', + units=time_units, + ) + return time_coord + + +def _get_cube(row, column_name): + """Create :class:`iris.cube.Cube` from :class:`pandas.Series`.""" + time_coord = _get_time_coord(int(row['year']), int(row['month'])) + lat_coord = LAT_COORD.copy() + lon_coord = LON_COORD.copy() + data = np.ma.masked_invalid(row[column_name]) + cube = iris.cube.Cube( + data.reshape((1, 1, 1)), + dim_coords_and_dims=[(time_coord, 0), (lat_coord, 1), (lon_coord, 2)], + units='ppb', + ) + return cube + + +def _fix_coords(cube): + """Fix coordinates.""" + utils.fix_dim_coordnames(cube) + + return cube + + +def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): + """Extract variable.""" + var = cmor_info.short_name + + # Load data + with warnings.catch_warnings(): + warnings.filterwarnings( + action='ignore', + message="Skipping global attribute 'units': 'units' is not a " + "permitted attribute", + category=UserWarning, + module='iris', + ) + skiprows = 0 + with open(filepath, 'r', encoding='utf-8') as csv: + for line in csv: + if line.startswith("#"): + skiprows = skiprows + 1 + + data_frame = pd.read_csv(filepath, header=skiprows) + + # Extract cube + cubes = iris.cube.CubeList() + for (_, row) in data_frame.iterrows(): + cube = _get_cube(row, 'average') + cubes.append(cube) + cube = cubes.concatenate_cube() + cube.var_name = var + + # Fix coordinates + cube = _fix_coords(cube) + + # Fix variable metadata + cube = _fix_var_metadata(var_info, cmor_info, cube) + + # Fix global metadata + 
utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable( + cube, + var, + out_dir, + attrs, + unlimited_dimensions=['time'], + ) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + cmor_table = cfg['cmor_table'] + glob_attrs = cfg['attributes'] + + # Run the cmorization + for (var, var_info) in cfg['variables'].items(): + filepath = Path(in_dir) / var_info['filename'] + logger.info("CMORizing variable '%s' from file %s", var, filepath) + glob_attrs['mip'] = var_info['mip'] + cmor_info = cmor_table.get_variable(var_info['mip'], var) + _extract_variable(var_info, cmor_info, glob_attrs, filepath, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/utilities.ncl b/esmvaltool/cmorizers/data/formatters/utilities.ncl index 42632889ef..4c8cf6444e 100644 --- a/esmvaltool/cmorizers/data/formatters/utilities.ncl +++ b/esmvaltool/cmorizers/data/formatters/utilities.ncl @@ -209,11 +209,11 @@ begin ; Reset date (middle of the month) tt = 0 do yy = year1, year2 - date(tt, 0) = yy yy@calendar = calendar m1 = where(yy.eq.year1, month1, 1) m2 = where(yy.eq.year2, month2, 12) do mm = m1, m2 + date(tt, 0) = yy date(tt, 1) = mm dm = days_in_month(yy, mm) / 2. + 1 date(tt, 2) = toint(dm) @@ -245,14 +245,14 @@ begin ; Reset date (middle of the day) tt = 0 do yy = year1, year2 - date(tt, 0) = yy m1 = where(yy.eq.year1, month1, 1) m2 = where(yy.eq.year2, month2, 12) do mm = m1, m2 - date(tt, 1) = mm d1 = where(yy.eq.year1 .and. mm.eq.m1, day1, 1) d2 = where(yy.eq.year2 .and. 
mm.eq.m2, day2, days_in_month(yy, mm)) do dd = d1, d2 + date(tt, 0) = yy + date(tt, 1) = mm date(tt, 2) = dd tt = tt + 1 end do diff --git a/esmvaltool/config-references.yml b/esmvaltool/config-references.yml index f48ac9a6ab..16725c3764 100644 --- a/esmvaltool/config-references.yml +++ b/esmvaltool/config-references.yml @@ -120,10 +120,19 @@ authors: institute: DLR, Germany orcid: https://orcid.org/0000-0001-7058-5938 github: LisaBock + bodas-salcedo_alejandro: + name: Bodas-Salcedo, Alejandro + institute: MetOffice, UK + orcid: bojovic_dragana: name: Bojovic, Dragana institute: BSC, Spain orcid: https://orcid.org/0000-0001-7354-1885 + bonnet_pauline: + name: Bonnet, Pauline + institute: DLR, Germany + orcid: https://orcid.org/0000-0003-3780-0784 + github: Paulinebonnet111 brunner_lukas: name: Brunner, Lukas institute: ETH Zurich, Switzerland @@ -252,6 +261,11 @@ authors: name: Hempelmann, Nils institute: IPSL, France orcid: + heuer_helge: + name: Heuer, Helge + institute: DLR, Germany + email: helge.heuer@dlr.de + orcid: https://orcid.org/0000-0003-2411-7150 hogan_emma: name: Hogan, Emma institute: MetOffice, UK @@ -288,10 +302,20 @@ authors: name: Koirala, Sujan institute: MPI-BGC, Germany orcid: https://orcid.org/0000-0001-5681-1986 + kraft_jeremy: + name: Kraft, Jeremy + institute: DLR, Germany + orcid: + github: jeremykraftdlr krasting_john: name: Krasting, John institute: NOAA, USA orcid: https://orcid.org/0000-0002-4650-9844 + kuehbacher_birgit: + name: Kuehbacher, Birgit + institute: DLR, Germany + email: birgit.kuehbacher@dlr.de + orcid: lejeune_quentin: name: Lejeune, Quentin institute: Climate Analytics, Germany @@ -391,6 +415,10 @@ authors: name: Pandde, Amarjiit institute: Univ. 
of Arizona, USA orcid: + pearce_francesca: + name: Pearce, Francesca + institute: MetOffice, UK + orcid: perez-zanon_nuria: name: Perez-Zanon, Nuria institute: BSC, Spain @@ -446,6 +474,11 @@ authors: name: Sandstad, Marit institute: Cicero, Norway orcid: + sarauer_ellen: + name: Sarauer, Ellen + institute: DLR, Germany + orcid: + github: ellensarauer serva_federico: name: Serva, Federico institute: CNR, Italy @@ -597,7 +630,7 @@ authors: sellar_alistair: name: Sellar, Alistair institute: MetOffice, UK - orcid: + orcid: 0000-0002-2955-7254 wyser_klaus: name: Wyser, Klaus institute: SMHI, Sweden @@ -668,6 +701,10 @@ authors: institute: orcid: github: mcreader97 + rumbold_heather: + name: Heather, Rumbold + institute: Met Office, UK + orcid: senftleben_daniel: name: Senftleben, Daniel institute: DLR, Germany diff --git a/esmvaltool/diag_scripts/austral_jet/asr.ncl b/esmvaltool/diag_scripts/austral_jet/asr.ncl index c12b66f166..5d855ad393 100644 --- a/esmvaltool/diag_scripts/austral_jet/asr.ncl +++ b/esmvaltool/diag_scripts/austral_jet/asr.ncl @@ -289,7 +289,7 @@ begin work_dir = output@work_dir opt = diag_script_info log_info(opt@wdiag) - plot_path = "missing" + plot_path = "n/a" plot_type = "" ; Iterate over all datasets diff --git a/esmvaltool/diag_scripts/austral_jet/main.ncl b/esmvaltool/diag_scripts/austral_jet/main.ncl index 446133e1d5..c078830687 100644 --- a/esmvaltool/diag_scripts/austral_jet/main.ncl +++ b/esmvaltool/diag_scripts/austral_jet/main.ncl @@ -731,7 +731,7 @@ begin if (output) then work_dir := output@work_dir opt = diag_script_info - plot_path = "missing" + plot_path = "n/a" plot_type = "" ; Iterate over all desired diagnostics diff --git a/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py b/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py index 425cdbd0b6..52eca796ab 100644 --- a/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py +++ b/esmvaltool/diag_scripts/autoassess/_plot_mo_metrics.py @@ -13,6 +13,8 @@ import 
matplotlib.pyplot as plt import numpy as np +from esmvaltool.diag_scripts.shared import save_figure + # Define some colours BLACK = '#000000' RED = '#FF0000' @@ -596,7 +598,8 @@ def plot_nac(cref, acc=None, extend_y=False, title=None, - ofile=None): + ofile=None, + config=None): """ Routine to produce NAC plot. @@ -611,6 +614,7 @@ def plot_nac(cref, :param bool extend_y: Extend y-axis to include obs/acc ranges :param str title: Plot title :param str ofile: Plot file name + :param dict config: ESMValTool configuration object """ # initialize if metrics is None: @@ -682,15 +686,31 @@ def plot_nac(cref, legend.set_title('Vs %s' % cref, prop={'size': 'small'}) # Display or produce file - if ofile: - # Create directory to write file to - odir = os.path.dirname(ofile) - if not os.path.isdir(odir): - os.makedirs(odir) + if ofile and config: + os.makedirs(config['plot_dir'], exist_ok=True) + provenance = get_provenance_record(config) # Note that bbox_inches only works for png plots - plt.savefig(ofile, bbox_extra_artists=(legend, ), bbox_inches='tight') + save_figure(ofile, provenance, config, fig, + bbox_extra_artists=(legend, ), bbox_inches='tight') else: # Need the following to attempt to display legend in frame fig.subplots_adjust(right=0.85) plt.show() plt.close() + + +def get_provenance_record(config): + """Create a provenance record describing the diagnostic data and plot.""" + filenames = [item["filename"] for item in config["input_data"].values()] + record = { + 'caption': 'Normalised assessment criteria plot', + 'plot_type': 'metrics', + 'authors': [ + 'williams_keith', + 'predoi_valeriu', + 'sellar_alistair' + ], + "ancestors": filenames, + } + + return record diff --git a/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py index 85d6a7dc02..0533b94eed 100644 --- a/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py +++ 
b/esmvaltool/diag_scripts/autoassess/land_surface_soilmoisture/soilmoisture.py @@ -2,16 +2,25 @@ import os import logging +import csv +from collections.abc import Iterable import numpy as np import iris from esmvalcore.preprocessor import regrid from esmvaltool.diag_scripts.shared._base import ProvenanceLogger -from esmvaltool.diag_scripts.shared._supermeans import get_supermean +from esmvaltool.diag_scripts.shared import ( + group_metadata, + run_diagnostic, + save_data, +) + +# Order of seasons must agree with preprocessor definition in recipe +SEASONS = ("djf", "mam", "jja", "son") logger = logging.getLogger(__name__) -def get_provenance_record(caption, run): +def get_provenance_record(caption, ancestor_filenames): """Create a provenance record describing the diagnostic data and plot.""" record = { 'caption': caption, @@ -27,69 +36,142 @@ def get_provenance_record(caption, run): 'dorigo17rse', 'gruber19essd', ], - 'ancestors': run, + "ancestors": ancestor_filenames, } return record -def land_sm_top(run): +def write_metrics(output_dir, metrics, config, ancestors): + """Write metrics to CSV file. + + The CSV file will have the name ``metrics.csv`` and can be + used for the normalised metric assessment plot. + + Parameters + ---------- + output_dir : string + The full path to the directory in which the CSV file will be written. + metrics : dictionary of metric,value pairs + The seasonal data to write. 
+ config : dictionary + ESMValTool configuration object + ancestors : list + Filenames of input files for provenance + """ + os.makedirs(output_dir, exist_ok=True) + + file_name = "metrics.csv" + file_path = os.path.join(output_dir, file_name) + + with open(file_path, "w", newline="", encoding="utf-8") as csvfile: + csv_writer = csv.writer(csvfile) + for line in metrics.items(): + csv_writer.writerow(line) + + record_provenance(file_path, config, ancestors) + + +def volumetric_soil_moisture(model_file, constr_season): + """ + Read moisture mass content and convert to volumetric soil moisture. + + Parameters + ---------- + model_file : string + Path to model file + constr_season : iris constraint + Constraint on season to load + + Returns + ------- + vol_sm1_run : cube + Volumetric soil moisture + """ + # Constant: density of water + rhow = 1000. + + # m01s08i223 + # CMOR name: mrsos (soil moisture in top model layer kg/m2) + mrsos = iris.load_cube( + model_file, + "mass_content_of_water_in_soil_layer" & constr_season + ) + + # Set soil moisture to missing data where no soil (moisture=0) + np.ma.masked_where(mrsos.data == 0, mrsos.data, copy=False) + + # first soil layer depth + dz1 = mrsos.coord('depth').bounds[0, 1] - \ + mrsos.coord('depth').bounds[0, 0] + + # Calculate the volumetric soil moisture in m3/m3 + # volumetric soil moisture = volume of water / volume of soil layer + # = depth equivalent of water / thickness of soil layer + # = (soil moisture content (kg m-2) / water density (kg m-3) ) / + # soil layer thickness (m) + # = mosrs / (rhow * dz1) + vol_sm1_run = mrsos / (rhow * dz1) + vol_sm1_run.units = "m3 m-3" + vol_sm1_run.long_name = "Top layer Soil Moisture" + + return vol_sm1_run + + +def flatten(list_of_lists): + """ + Convert list of lists into a flat list, allowing some items to be non-list. 
+ + Parameters + ---------- + list_of_lists : list + List containing iterables to flatten, plus optionally non-list items + + Returns + ------- + flattened : list + Flattened list with one level of nesting removed + """ + flattened = [] + for item in list_of_lists: + if isinstance(item, Iterable) and not isinstance(item, (str, bytes)): + flattened.extend(item) + else: + flattened.append(item) + + return flattened + + +def land_sm_top(clim_file, model_file, model_dataset, config, ancestors): """ Calculate median absolute errors for soil mosture against CCI data. Parameters ---------- - run: dict - dictionary containing model run metadata - (see auto_assess/model_run.py for description) + clim_file : string + Path to observation climatology file + model_file : list + Paths to model files + model_dataset : string + Name of model dataset + config : dict + ESMValTool configuration object + ancestors : list + Filenames of input files for provenance Returns ------- metrics: dict a dictionary of metrics names and values """ - supermean_data_dir = os.path.join(run['data_root'], run['runid'], - run['_area'] + '_supermeans') - - seasons = ['djf', 'mam', 'jja', 'son'] + # Work through each season + metrics = {} + for index, season in enumerate(SEASONS): - # Constant: density of water - rhow = 1000. 
+ constr_season = iris.Constraint(season_number=index) + ecv_clim = iris.load_cube(clim_file, constr_season) - # Work through each season - metrics = dict() - for season in seasons: - fname = 'ecv_soil_moisture_{}.nc'.format(season) - clim_file = os.path.join(run['climfiles_root'], fname) - ecv_clim = iris.load_cube(clim_file) - # correct invalid units - if (ecv_clim.units == 'unknown' and - 'invalid_units' in ecv_clim.attributes): - if ecv_clim.attributes['invalid_units'] == 'm^3m^-3': - ecv_clim.units = 'm3 m-3' - - # m01s08i223 - # CMOR name: mrsos (soil moisture in top model layer kg/m2) - mrsos = get_supermean('mass_content_of_water_in_soil_layer', - season, - supermean_data_dir) - - # Set soil moisture to missing data on ice points (i.e. no soil) - np.ma.masked_where(mrsos.data == 0, mrsos.data, copy=False) - - # first soil layer depth - dz1 = mrsos.coord('depth').bounds[0, 1] - \ - mrsos.coord('depth').bounds[0, 0] - - # Calculate the volumetric soil moisture in m3/m3 - # volumetric soil moisture = volume of water / volume of soil layer - # = depth equivalent of water / thickness of soil layer - # = (soil moisture content (kg m-2) / water density (kg m-3) ) / - # soil layer thickness (m) - # = mosrs / (rhow * dz1) - vol_sm1_run = mrsos / (rhow * dz1) - vol_sm1_run.units = "m3 m-3" - vol_sm1_run.long_name = "Top layer Soil Moisture" + vol_sm1_run = volumetric_soil_moisture(model_file, constr_season) # update the coordinate system ECV data with a WGS84 coord system # unify coord systems for regridder @@ -117,23 +199,72 @@ def land_sm_top(run): dff = vol_sm1_run - ecv_clim # save output and populate metric - iris.save(dff, os.path.join(run['dump_output'], - 'soilmoist_diff_{}.nc'.format(season))) - name = 'soilmoisture MedAbsErr {}'.format(season) - dffs = dff.data - dffs = np.ma.abs(dffs) - metrics[name] = float(np.ma.median(dffs)) - - # record provenance - plot_file = "Autoassess soilmoisture metrics" - caption = 'Autoassess soilmoisture MedAbsErr for 
{}'.format(str(seasons)) - provenance_record = get_provenance_record(caption, run) - cfg = {} - cfg['run_dir'] = run['out_dir'] - # avoid rewriting provenance when running the plot diag - if not os.path.isfile(os.path.join(cfg['run_dir'], - 'diagnostic_provenance.yml')): - with ProvenanceLogger(cfg) as provenance_logger: - provenance_logger.log(plot_file, provenance_record) + caption = f"{model_dataset} minus CCI soil moisture clim for {season}" + provenance_record = get_provenance_record(caption, ancestors) + save_data(f"soilmoist_diff_{model_dataset}_{season}", + provenance_record, config, dff) + + name = f"soilmoisture MedAbsErr {season}" + metrics[name] = float(np.ma.median(np.ma.abs(dff.data))) return metrics + + +def record_provenance(diagnostic_file, config, ancestors): + """Record provenance.""" + caption = f"Autoassess soilmoisture MedAbsErr for {SEASONS}" + provenance_record = get_provenance_record(caption, ancestors) + with ProvenanceLogger(config) as provenance_logger: + provenance_logger.log(diagnostic_file, provenance_record) + + +def main(config): + """ + Top-level function for soil moisture metrics. + + Parameters + ---------- + config : dict + The ESMValTool configuration. + """ + input_data = config["input_data"] + + # Separate OBS from model datasets + # (and check there is only one obs dataset) + obs = [v for v in input_data.values() if v["project"] == "OBS"] + if len(obs) != 1: + msg = f"Expected exactly 1 OBS dataset: found {len(obs)}" + raise RuntimeError(msg) + clim_file = obs[0]["filename"] + + models = group_metadata( + [v for v in input_data.values() if v["project"] != "OBS"], + "dataset") + + for model_dataset, group in models.items(): + # 'model_dataset' is the name of the model dataset. + # 'group' is a list of dictionaries containing metadata. 
+ logger.info("Processing data for %s", model_dataset) + model_file = [item["filename"] for item in group] + + # Input filenames for provenance + ancestors = flatten([model_file, clim_file]) + + # Calculate metrics + metrics = land_sm_top(clim_file, model_file, model_dataset, config, + ancestors) + + # Write metrics + metrics_dir = os.path.join( + config["plot_dir"], + f"{config['exp_model']}_vs_{config['control_model']}", + config["area"], + model_dataset, + ) + + write_metrics(metrics_dir, metrics, config, ancestors) + + +if __name__ == "__main__": + with run_diagnostic() as CONFIG: + main(CONFIG) diff --git a/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py b/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py index ccf3051d64..201ae0c248 100644 --- a/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py +++ b/esmvaltool/diag_scripts/autoassess/plot_autoassess_metrics.py @@ -43,10 +43,12 @@ def main(): cfg['diag_name'], vsloc, cfg['area'], control_model, 'metrics.csv') plot_title = ' '.join([cfg['area'], control_model, 'vs', exp_model]) - # Read metrics files + # Read (and record) metrics files # metrics = read_order_metrics(args.file_ord) ref = read_model_metrics(file_ref) tests = [read_model_metrics(file_exp)] + cfg['input_data'] = {'ref': {'filename': file_ref}, + 'exp': {'filename': file_exp}} # var = read_model_metrics(args.file_var) obs, acc = None, None if 'additional_metrics' in cfg: @@ -68,7 +70,8 @@ def main(): acc=acc, extend_y=False, title=plot_title, - ofile=os.path.join(cfg['plot_dir'], cfg['plot_name'] + '.png')) + ofile=cfg['plot_name'], + config=cfg) if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl index eb43b10cd3..0d6319ea85 100644 --- a/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_beta.ncl @@ -279,11 +279,7 @@ begin create_legend_lines(leg@annots, leg, plot_dir + \ 
DIAG_SCRIPT + "_legend", "markers") - if (file_type .ne. "png") then - plotname = plot_dir + plot_file + "." + file_type - else - plotname = plot_dir + plot_file + ".000001.png" - end if + plotname = plot_dir + plot_file + "." + file_type ; Call provenance logger log_provenance(ncdf_outfile, \ diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl index 7876ffdb62..f37b7bc23a 100644 --- a/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_co2_cycle.ncl @@ -492,11 +492,7 @@ begin ; Write NetCDF output ncdf_outfile = ncdf_write(CO2var, new_path) - if (file_type .ne. "png") then - plotname = plot_dir + plot_file + "." + file_type - else - plotname = plot_dir + plot_file + ".000001.png" - end if + plotname = plot_dir + plot_file + "." + file_type ; ----------------------------------------------------------- ; Call provenance logger diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl index 94d5b8145c..61f1f5c5a3 100644 --- a/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_constraint.ncl @@ -331,7 +331,7 @@ begin ; Call provenance logger log_provenance(ncdf_outfile, \ - output_dir + "." + file_type, \ + output_dir + plot_file + "." + file_type, \ XStg + " vs " + YStg, \ (/"anomaly", "corr", "stddev"/), \ (/"trop", "global"/),\ diff --git a/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl b/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl index 1479620173..589b0d72e8 100644 --- a/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl +++ b/esmvaltool/diag_scripts/carbon_ec/carbon_gammaHist.ncl @@ -336,7 +336,7 @@ begin ; Call provenance logger log_provenance(ncdf_outfile, \ - output_dir + "." + file_type, \ + output_dir + plot_file + "." 
+ file_type, \ XStg + " vs " + YStg, \ (/"anomaly", "corr", "stddev"/), \ (/"trop", "global"/),\ diff --git a/esmvaltool/diag_scripts/clouds/clouds.ncl b/esmvaltool/diag_scripts/clouds/clouds.ncl index 928d7cc96d..c05c091cf4 100644 --- a/esmvaltool/diag_scripts/clouds/clouds.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds.ncl @@ -688,7 +688,6 @@ begin ; function in aux_plotting.ncl if (ii.eq.0) then - nframe = 0 ndframe = 0 ; note: an array of workspaces (i.e. wks(numseas)) does not work as ; attributes cannot be assigned to each array element @@ -768,8 +767,6 @@ begin maps(imod, 0) = gsn_csm_contour_map(wks0, data1, res) end if - nframe = nframe + 1 - ; mandatory netcdf output data1@var = var0 + "_mean_" + names(imod) @@ -1310,19 +1307,10 @@ begin end if end if ; if embracesetup - nframe = nframe + 1 - do is = 0, numseas - 1 log_info("Wrote " + outfile(is)) end do - do is = 0, numseas - 1 - suffix = get_file_suffix(outfile(is), 0) - if (suffix .eq. ".png") then - outfile(is) = suffix@fBase + "." + sprinti("%0.6i", nframe) + suffix - end if - end do - ; ------------------------------------------------------------------------ ; write provenance to netcdf output and plot file(s) (mean) ; ------------------------------------------------------------------------ @@ -1482,13 +1470,6 @@ begin end if end if ; end if embracesetup - do is = 0, numseas - 1 - suffix = get_file_suffix(outfile_d(is), 0) - if (suffix .eq. ".png") then - outfile_d(is) = suffix@fBase + "." + sprinti("%0.6i", ndframe) + suffix - end if - end do - do is = 0, numseas - 1 log_info(" Wrote " + outfile(is)) diff --git a/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl b/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl index 9fcfadebbb..c18da6fe23 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_dyn_matrix.ncl @@ -681,8 +681,6 @@ begin xyres2@tmXTLabelsOn = True xyres2@tmXBLabelsOn = False - nframe = 0 - do ii = 0, nplots - 1 if (ii .lt. 
dim_MOD) then plotdata = result(ii, :, :) @@ -775,8 +773,6 @@ begin delete(plotdata) delete(countdata) - - nframe = nframe + 1 end do ; pres = True ; needed to override @@ -799,11 +795,6 @@ begin log_info("Wrote " + doutfile) log_info("Wrote " + coutfile) - suffix = get_file_suffix(outfile, 0) - if (suffix .eq. ".png") then - outfile = suffix@fBase + "." + sprinti("%0.6i", nframe) + suffix - end if - ; ========================================================================== ; ---------------------------------------------------------------------- diff --git a/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl b/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl index e27984a822..a3e318c556 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_interannual.ncl @@ -445,11 +445,9 @@ begin if (ii.eq.0) then wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_interannual_" \ + var0 + filename_add) - nframe = 0 end if maps(ii) = gsn_csm_contour_map(wks, data1, res) - nframe = nframe + 1 ; ######################################### ; # output all datasets to common netCDF # @@ -478,7 +476,6 @@ begin pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to ; the bottom outfile = panelling(wks, maps, (dim_MOD + 3) / 4, 4, pres) - nframe = nframe + 1 ; plot multi-obs and multi-model average (if requested) @@ -535,11 +532,6 @@ begin log_info(" Wrote " + outfile) - suffix = get_file_suffix(outfile, 0) - if (suffix .eq. ".png") then - outfile = suffix@fBase + "." 
+ sprinti("%0.6i", nframe) + suffix - end if - ; ------------------------------------------------------------------------ ; write provenance to common netcdf and plot file ; ------------------------------------------------------------------------ diff --git a/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl b/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl index f9bf33f30d..b383753c90 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_lifrac_scatter.ncl @@ -652,8 +652,6 @@ begin data_l(0, :) = data@_FillValue end if - nframe = 0 - do ii = 0, dim_MOD + add_dim_MOD - 1 if (ii .eq. refidx_ta) then continue @@ -724,8 +722,6 @@ begin frame(wks_i) draw(plots_l(ii)) frame(wks_l) - - nframe = nframe + 1 end do pres = True ; needed to override @@ -747,17 +743,6 @@ begin log_info("Wrote " + outfile_l) delete(idx0) - nframe = nframe + 1 - - suffix = get_file_suffix(outfile_i, 0) - if (suffix .eq. ".png") then - outfile_i = suffix@fBase + "." + sprinti("%0.6i", nframe) + suffix - end if - suffix = get_file_suffix(outfile_l, 0) - if (suffix .eq. ".png") then - outfile_l = suffix@fBase + "." 
+ sprinti("%0.6i", nframe) + suffix - end if - ; ========================================================================== ; ---------------------------------------------------------------------- diff --git a/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl b/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl index 36f9f6fbef..bc94f08fc2 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_pdf.ncl @@ -220,7 +220,6 @@ begin ; function in aux_plotting.ncl if (ii.eq.0) then - nframe = 0 wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_pdf_" + var0 \ + filename_add) wks_line = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_pdf_" + var0 \ @@ -235,7 +234,6 @@ begin res@tiMainString = names(imod) plot(ii) = gsn_histogram(wks, ndtooned(data1), res) - nframe = nframe + 1 ; mandatory netcdf output @@ -364,12 +362,6 @@ begin gsn_panel(wks, plot(plotind), plotsperline, pres) outfile = wks@fullname - nframe = nframe + 1 - - suffix = get_file_suffix(outfile, 0) - if (suffix .eq. ".png") then - outfile = suffix@fBase + "." + sprinti("%0.6i", nframe) + suffix - end if ; ======================================================================== diff --git a/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl b/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl index 35e217613b..996fcd7121 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_scatter.ncl @@ -434,8 +434,6 @@ begin data(0, :) = data@_FillValue end if - nframe = 0 - do ii = 0, dim_MOD - 1 if (ii .eq. refidx_x) then continue @@ -485,7 +483,6 @@ begin res@tiMainString = names(ii) plots(ii) = gsn_csm_xy(wks, result_avg&bin, data, res) - nframe = nframe + 1 if (ii .eq. 
mm_ind) then res_std = True @@ -554,16 +551,10 @@ begin pres@lbLabelFontHeightF = min((/0.01, 0.01 * 6.0 \ / tofloat((dim_MOD + 1) / 2)/)) outfile = panelling(wks, plots(idx0), (n + 3) / 4, 4, pres) - nframe = nframe + 1 delete(idx0) log_info("Wrote " + outfile) - suffix = get_file_suffix(outfile, 0) - if (suffix .eq. ".png") then - outfile = suffix@fBase + "." + sprinti("%0.6i", nframe) + suffix - end if - ; ========================================================================== ; ---------------------------------------------------------------------- diff --git a/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl b/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl index d918e9eccb..1b6ae7fdad 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_seasonal_cycle.ncl @@ -405,11 +405,9 @@ begin if (ii.eq.0) then wks = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_seasonal_cycle_" \ + var0 + filename_add) - nframe = 0 end if maps(ii) = gsn_csm_contour_map(wks, data1, res) - nframe = nframe + 1 ; ######################################### ; # output all datasets to common netCDF # @@ -438,12 +436,6 @@ begin pres@pmLabelBarOrthogonalPosF = -0.01 ; shift label bar a bit to ; the bottom outfile = panelling(wks, maps, (dim_MOD + 3) / 4, 4, pres) - nframe = nframe + 1 - - suffix = get_file_suffix(outfile, 0) - if (suffix .eq. ".png") then - outfile = suffix@fBase + "." 
+ sprinti("%0.6i", nframe) + suffix - end if ; plot multi-obs and multi-model average (if requested) @@ -455,7 +447,6 @@ begin res@gsnLeftString = "mean = " + sprintf("%6.3f", gavg) res@tiMainString = "Multi-obs average" map_multiobs = gsn_csm_contour_map(wks, multiobs, res) - nframe = nframe + 1 end if if (flag_multimod) then @@ -488,7 +479,6 @@ begin res@gsnRightString = "rmsd = " + sprintf("%6.3f", rmsd) res@tiMainString = "Multi-model average" map_multimod = gsn_csm_contour_map(wks, multimod, res) - nframe = nframe + 1 end if log_info(" Wrote " + outfile) diff --git a/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl b/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl index 32b7befb7f..a9ab9848c4 100644 --- a/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl +++ b/esmvaltool/diag_scripts/clouds/clouds_zonal.ncl @@ -483,7 +483,6 @@ begin ; individually wks0 = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + var0 + \ "_" + season(0) + filename_add) - nframe = 0 ; difference plots will be saved to a different file if (flag_diff) then wks0d = get_wks("dummy_for_wks", DIAG_SCRIPT, "clouds_zonal_" + \ @@ -575,7 +574,6 @@ begin maps(imod, 0) = gsn_csm_contour(wks0, data1, res) end if end if - nframe = nframe + 1 ; mandatory netcdf output @@ -1121,19 +1119,11 @@ begin 4, pres) end if end if ; if embracesetup - nframe = nframe + 1 do is = 0, numseas - 1 log_info("Wrote " + outfile(is)) end do - do is = 0, numseas - 1 - suffix = get_file_suffix(outfile(is), 0) - if (suffix .eq. ".png") then - outfile(is) = suffix@fBase + "." + sprinti("%0.6i", nframe) + suffix - end if - end do - ; ------------------------------------------------------------------------ ; write provenance to netcdf output and plot file(s) (mean) ; ------------------------------------------------------------------------ @@ -1290,13 +1280,6 @@ begin end if ; end if embracesetup ndframe = ndframe + 1 - do is = 0, numseas - 1 - suffix = get_file_suffix(outfile_d(is), 0) - if (suffix .eq. 
".png") then - outfile_d(is) = suffix@fBase + "." + sprinti("%0.6i", ndframe) + suffix - end if - end do - do is = 0, numseas - 1 log_info(" Wrote " + outfile(is)) diff --git a/esmvaltool/diag_scripts/examples/diagnostic.py b/esmvaltool/diag_scripts/examples/diagnostic.py index 6ef63d13f1..a9e4f8667e 100644 --- a/esmvaltool/diag_scripts/examples/diagnostic.py +++ b/esmvaltool/diag_scripts/examples/diagnostic.py @@ -20,6 +20,9 @@ def get_provenance_record(attributes, ancestor_files): """Create a provenance record describing the diagnostic data and plot.""" + # Associated recipe uses contains a caption string with placeholders + # like {long_name} that are now populated from attributes dictionary. + # Note that for simple recipes, caption can be set here as a simple string caption = attributes['caption'].format(**attributes) record = { diff --git a/esmvaltool/diag_scripts/examples/make_plot.py b/esmvaltool/diag_scripts/examples/make_plot.py new file mode 100644 index 0000000000..796d55cd49 --- /dev/null +++ b/esmvaltool/diag_scripts/examples/make_plot.py @@ -0,0 +1,76 @@ +"""Python example diagnostic.""" +import logging +from pathlib import Path + +import iris +import matplotlib.pyplot as plt + +from esmvaltool.diag_scripts.shared import run_diagnostic, save_figure + +logger = logging.getLogger(Path(__file__).stem) + + +def main(cfg): + """Plot part of figure_9.3a from IPCC AR6.""" + colors = { + 'historical-ssp126': '#2a3652', + 'historical-ssp585': '#78333a', + } + fill_colors = { + 'historical-ssp126': '#d2d5dc', + 'historical-ssp585': '#ddced2', + } + labels = { + 'historical-ssp126': 'Historical and SSP1-2.6', + 'historical-ssp585': 'Historical and SSP5-8.5', + } + + # Group input data by experiment + groups = {} + for filename, attributes in cfg['input_data'].items(): + exp = attributes['exp'] + if exp not in groups: + groups[exp] = {} + groups[exp][attributes['dataset']] = filename + + # Loop over experiments to populate plot + for exp, group in 
groups.items(): + mean = iris.load_cube(group['MultiModelMean']) + iris.quickplot.plot( + mean, + color=colors.get(exp), + label=labels.get(exp, exp), + ) + + p17 = iris.load_cube(group['MultiModelPercentile17']) + p83 = iris.load_cube(group['MultiModelPercentile83']) + time_coord = mean.coord('time') + time_axis = time_coord.units.num2date(time_coord.core_points()) + plt.fill_between( + time_axis, + p17.core_data(), + p83.core_data(), + color=fill_colors.get(exp), + label='Likely (17% - 83%) ranges', + ) + + plt.title('Sea surface temperature anomaly') + plt.legend(loc='upper left') + + filename = 'IPCC_AR6_figure_9.3a_1850-2100' + provenance_record = { + 'caption': "Part of figure 9.3a from IPCC AR6.", + 'authors': [ + 'kalverla_peter', + 'andela_bouwe', + ], + 'references': ['fox-kemper21ipcc'], + 'ancestors': list(cfg['input_data'].keys()), + } + save_figure(filename, provenance_record, cfg, dpi=300) + + +if __name__ == '__main__': + + with run_diagnostic() as config: + main(config) diff --git a/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py b/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py index 2ceba18b21..424f089d69 100644 --- a/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py +++ b/esmvaltool/diag_scripts/galytska23/select_variables_for_tigramite.py @@ -45,11 +45,27 @@ save_data, ) from esmvaltool.diag_scripts.shared._base import ( - get_plot_filename, -) + get_plot_filename, ) logger = logging.getLogger(Path(__file__).stem) +# Fixed parameters +# list of variables to be ignored per model +ignored_variables = {"HadISST": ["heat_flux"]} + +# list of variables per dataset that will be processed +proc_vars = { + "ERA5": [ + 'PV', 'Arctic_temperature', 'Psl_Ural', 'Psl_Sib', 'Psl_Aleut', + 'heat_flux' + ], + "HadISST": ['BK_sic', 'Ok_sic'], + "all_other_datasets": [ + 'PV', 'Arctic_temperature', 'Psl_Ural', 'Psl_Sib', 'Psl_Aleut', + 'heat_flux', 'BK_sic', 'Ok_sic' + ], +} + def 
get_provenance_record(ancestor_files): """Create a provenance record describing the diagnostic data and plot.""" @@ -124,55 +140,68 @@ def calculate_heat_flux(list_va_ta): return hf_anom_zm -def variable_cases(var, item): +def variable_cases(var_name, var): """Match preprocessor name and corresponding calculations.""" - if var == 'pv': - out_var = calculate_polar_vortex(item) - elif var == 'pre_tas': - out_var = calculate_arctic_tas(item) - elif var == 'pressure_ural': - out_var = calculate_slp(item) + if var_name == 'pv': + out_var = calculate_polar_vortex(var) + elif var_name == 'pre_tas': + out_var = calculate_arctic_tas(var) + elif var_name == 'pressure_ural': + out_var = calculate_slp(var) out_var.var_name = 'Psl_Ural' - elif var == 'pressure_sib': - out_var = calculate_slp(item) + elif var_name == 'pressure_sib': + out_var = calculate_slp(var) out_var.var_name = 'Psl_Sib' - elif var == 'pressure_aleut': - out_var = calculate_slp(item) + elif var_name == 'pressure_aleut': + out_var = calculate_slp(var) out_var.var_name = 'Psl_Aleut' - elif var == 'bk_ice': - out_var = finalize_bk_ice(item) - elif var == 'ok_ice': - out_var = finalize_ok_ice(item) - elif var == 'heat_flux': - out_var = prepare_heat_flux(item) + elif var_name == 'bk_ice': + out_var = finalize_bk_ice(var) + elif var_name == 'ok_ice': + out_var = finalize_ok_ice(var) + elif var_name == 'heat_flux': + out_var = prepare_heat_flux(var) else: - raise NotImplementedError(f"Variable '{var}' not supported") + raise NotImplementedError(f"Variable '{var_name}' not yet supported.") return out_var -def calculate_variables(input_dict): +def calculate_variables(dataset_dict): """Calculate all necessary variables.""" logger.debug("Variables are calculated for the following datasources:%s", - input_dict.keys()) - dictionary = {} - for key, value in input_dict.items(): - logger.debug("Calculating final variables for %s dataset", key) - dictionary.setdefault(key, {}) - tmp_list = [] - for item in value: - if 
item['preprocessor'] == "heat_flux": - tmp_list.append(variable_cases(item['preprocessor'], item)) - else: - dictionary[key].setdefault( - variable_cases(item['preprocessor'], item).var_name, - variable_cases(item['preprocessor'], item) - ) - - if key != "HadISST": - # Calculate heat flux for all data sources except HadISST + dataset_dict.keys()) + processed_vars = {} + for dataset, variables in dataset_dict.items(): + processed_vars[dataset] = {} + + logger.debug("Calculating final variables %s for %s dataset", + variables, dataset) + + if dataset in ignored_variables: + to_ignore_vars = ignored_variables.get(dataset, None) + for var in variables: + var_name = var['preprocessor'] + if var_name not in to_ignore_vars: + new_var = variable_cases(var_name, var) + new_var_name = new_var.var_name + processed_vars[dataset][new_var_name] = new_var + else: + tmp_list = [] + for var in variables: + var_name = var['preprocessor'] + if var_name == "heat_flux": + tmp_list.append(variable_cases(var_name, var)) + else: + new_var = variable_cases(var_name, var) + new_var_name = new_var.var_name + processed_vars[dataset][new_var_name] = new_var + if len(tmp_list) != 2: + raise IndexError("The preprocessor heat flux requests two \ + variables in the recipe: va and ta") heat_flux = calculate_heat_flux(tmp_list) - dictionary[key].setdefault(heat_flux.var_name, heat_flux) - return dictionary + processed_vars[dataset][heat_flux.var_name] = heat_flux + + return processed_vars def plotting_support(cube, key, **kwargs): @@ -193,52 +222,84 @@ def plot_timeseries(dictionary, var, cfg): colors = plt.cm.viridis(np.linspace(0, 1, len(dictionary.keys()))) baseplotname = f"Timeseries_{var}_anomalies" filename = get_plot_filename(baseplotname, cfg) - for i, key in enumerate(dictionary.keys()): - if var not in ('BK_sic', 'Ok_sic'): - if key == "HadISST": + for idx, dataset in enumerate(dictionary.keys()): + if var not in proc_vars["HadISST"]: + if dataset == "HadISST": continue - if key != 'ERA5': 
- plotting_support(dictionary[key][var], key, - color=colors[i]) + if dataset != 'ERA5': + plotting_support(dictionary[dataset][var], + dataset, color=colors[idx]) else: - plotting_support(dictionary[key][var], key, - color='k', linewidth=2) + plotting_support(dictionary[dataset][var], + dataset, + color='k', + linewidth=2) else: - if key == "ERA5": + if dataset == "ERA5": continue - if key != 'HadISST': - plotting_support(dictionary[key][var], key, color=colors[i]) + if dataset != 'HadISST': + plotting_support(dictionary[dataset][var], + dataset, color=colors[idx]) else: - plotting_support(dictionary[key][var], key, color='blue', + plotting_support(dictionary[dataset][var], + dataset, + color='blue', linewidth=2) fig.savefig(filename, bbox_inches='tight') +def assemble_cube_list(dataset, var, special_datasets): + """ + Assemble a list of processed vars cubes. + + Depending on what vars are needed per dataset, + variables list differs per analyzed dataset. Dict holding the + needed variables per dataset needs updating everytime a new dataset + or variable gets included. + + Parameters + ---------- + dataset: str + dataset name. + var: dict + variable dictionary. + special_datasets: list + list of datasets to be treated separately, + with restricted variables. + type: list of datasets (list of strings). + + Returns + ------- + iris.cube.CubeList + list of cubes. 
+ """ + if dataset not in special_datasets: + cube_list = iris.cube.CubeList( + [var[proc_var] for proc_var in proc_vars["all_other_datasets"]]) + else: + cube_list = iris.cube.CubeList( + [var[proc_var] for proc_var in proc_vars[dataset]]) + + return cube_list + + def main(cfg): """Calculate and save final variables into .nc files.""" + special_datasets = ["ERA5", "HadISST"] + my_files_dict = group_metadata(cfg['input_data'].values(), 'dataset') all_variables = calculate_variables(my_files_dict) + # Check is timeseries should be plotted - if cfg['plot_timeseries'] is True: + if cfg['plot_timeseries']: plot_timeseries(all_variables, cfg['variable_to_plot'], cfg) - for key in my_files_dict: - logger.info("Processing final calculations in dataset %s", key) - prov_record = get_provenance_record([key]) - var = all_variables[key] - if key == "ERA5": - cube_list = iris.cube.CubeList([ - var['PV'], var['Arctic_temperature'], var['Psl_Ural'], - var['Psl_Sib'], var['Psl_Aleut'], var['heat_flux']]) - elif key == "HadISST": - cube_list = iris.cube.CubeList([ - var['BK_sic'], var['Ok_sic']]) - else: - cube_list = iris.cube.CubeList([ - var['PV'], var['Arctic_temperature'], var['Psl_Ural'], - var['Psl_Sib'], var['Psl_Aleut'], var['heat_flux'], - var['BK_sic'], var['Ok_sic']]) - save_data(key, prov_record, cfg, cube_list) - logger.info("%s data is saved in .nc", key) + for dataset in my_files_dict: + logger.info("Processing final calculations in dataset %s", dataset) + prov_record = get_provenance_record([dataset]) + var = all_variables[dataset] + cube_list = assemble_cube_list(dataset, var, special_datasets) + save_data(dataset, prov_record, cfg, cube_list) + logger.info("%s data is saved in .nc", dataset) logger.info("Done.") diff --git a/esmvaltool/diag_scripts/iht_toa/poisson_solver.py b/esmvaltool/diag_scripts/iht_toa/poisson_solver.py new file mode 100644 index 0000000000..886d5fc616 --- /dev/null +++ b/esmvaltool/diag_scripts/iht_toa/poisson_solver.py @@ -0,0 +1,293 @@ +# 
(C) Crown Copyright 2023, the Met Office. +"""Poisson solver for the full ocean-atmosphere column. + +The Poisson equation is solved by numerically using the bi-conjugate +gradient stabilized (BiCGSTAB) method. + +The solution is achieved when the difference between the input field (radiative +flux) and the Laplacian of the output field is less than the stated tolerance. +If the solver fails to converge, the tolerance can be increased. + +Convergence is achieved faster by using a preconditioner on the output field. + +The heat transport is calculated as the gradient of the energy flux potential, +the output of the Poisson solver. +""" + +import numpy as np +from numba import jit + + +def swap_bounds(array): + """Extend the array by one in all directions. + + As the array is periodic it allows for easier computations at + boundaries. + """ + shape0, shape1 = np.array(array.shape) - 2 + wrap_point = int(shape1 / 2 + 1) + for i in range(1, shape1 + 1): + array[0, i] = array[1, wrap_point] + array[shape0 + 1, i] = array[shape0, wrap_point] + wrap_point += 1 + if wrap_point > shape1: + wrap_point = 1 + + array[:, 0] = array[:, shape1] + array[:, shape1 + 1] = array[:, 1] + + return array + + +def dot_prod(a_matrix, b_matrix): + """Calculate dot product of two matrices only over source term size.""" + shape0, shape1 = np.array(a_matrix.shape) - 2 + return (a_matrix[1:shape0 + 1, 1:shape1 + 1] * + b_matrix[1:shape0 + 1, 1:shape1 + 1]).sum() + + +def precon(x_matrix, m_matrix): + """Preconditioner. + + This is a wrapper to two steps that are optimised using jit. + It implements the preconditioning step of van der Vorst, H. A., 1992. + https://doi.org/10.1137/0913035. 
+ """ + cx_matrix = np.zeros(np.array(x_matrix.shape)) + precon_a(x_matrix, m_matrix[1], m_matrix[2], m_matrix[4], cx_matrix) + cx_matrix = swap_bounds(cx_matrix) + precon_b(m_matrix[0], m_matrix[3], cx_matrix) + cx_matrix = swap_bounds(cx_matrix) + return cx_matrix + + +@jit(nopython=True) +def precon_a(x_matrix, m_w, m_s, m_p, cx_matrix): + """First step of preconditioner.""" + shape0, shape1 = np.array(cx_matrix.shape) - 2 + for j in range(1, shape0 + 1): + for i in range(1, shape1 + 1): + cx_matrix[j, i] = m_p[j, i] * (x_matrix[j, i] - + m_s[j, i] * cx_matrix[j - 1, i] - + m_w[j, i] * cx_matrix[j, i - 1]) + + +@jit(nopython=True) +def precon_b(m_e, m_n, cx_matrix): + """Second step of preconditioner.""" + shape0, shape1 = np.array(cx_matrix.shape) - 2 + for j in range(shape0, 0, -1): + for i in range(shape1, 0, -1): + cx_matrix[j, i] = (cx_matrix[j, i] - + m_e[j, i] * cx_matrix[j, i + 1] - + m_n[j, i] * cx_matrix[j + 1, i]) + + +class SphericalPoisson: + """Poisson solver over the sphere. + + Solve Poisson equation for a given source term (forcing) and + calculate meridional heat transport (MHT). + """ + + def __init__(self, logger, source, tolerance=2.0e-4): + """Initialise solver with source field, metrics and matrices.""" + self.logger = logger + self.source = source + self.tolerance = tolerance + self.energy_flux_potential = None + self.meridional_heat_transport = None + logger.info("Initialising Poisson solver.") + self.set_matrices() + + def set_matrices(self): + """Calculate A and M matrices. + + A is the matrix that defines the five-point stencil (Eq. 8). The + A_matrix are the values are the contributions from each of the + four neighbouring cells: e,w,s,n,p. 
+ """ + # Calculate metrics hpi and hvj + src_shape = np.array(self.source.shape) + hpi = np.zeros(src_shape[0]) + hvj = np.zeros(src_shape[0] + 1) + deltay = np.pi / src_shape[0] + yyy = -0.5 * np.pi + 0.5 * deltay + hvj[0] = 0.0 + for j in range(0, src_shape[0]): + hpi[j] = np.cos(yyy) + hvj[j + 1] = np.cos(yyy + 0.5 * deltay) + yyy += deltay + hvj[-1] = 0.0 + + # Storing the full matrix + a_matrix = np.zeros((5, *src_shape)) + + # ILU factors + m_matrix = np.zeros((5, *(src_shape + 1))) + + # Spherical Laplacian variables + aaa = 1.0 / ((2.0 * np.pi / src_shape[1])**2.) + bbb = 1.0 / ((np.pi / src_shape[0])**2.) + + # First calculate the Poisson equations 5-point stencil + # A_w is the contribution from i-1, A_e is from i+1, + # A_s is j-1, A_n is j+1, and A_p is the diagonal + for j in range(0, src_shape[0]): + txa = aaa / hpi[j]**2.0 + tyb = bbb / hpi[j] + + for i in range(0, src_shape[1]): + a_matrix[0, j, i] = txa + a_matrix[1, j, i] = txa + a_matrix[2, j, i] = tyb * hvj[j] + a_matrix[3, j, i] = tyb * hvj[j + 1] + a_matrix[4, j, i] = -a_matrix[0:4, j, i].sum() + + # ILU/SIP preconditioner factors: alf = 0.0 is ILU + alf = 0.9 + m_matrix[4] += 1.0 + + for j in range(1, src_shape[0] + 1): + for i in range(1, src_shape[1] + 1): + m_matrix[2, j, i] = (a_matrix[2, j - 1, i - 1] / + (1.0 + alf * m_matrix[0, j - 1, i])) + + m_matrix[1, j, i] = (a_matrix[1, j - 1, i - 1] / + (1.0 + alf * m_matrix[3, j, i - 1])) + + m_matrix[4, j, i] = (a_matrix[4, j - 1, i - 1] - + m_matrix[2, j, i] * + (m_matrix[3, j - 1, i] - + alf * m_matrix[0, j - 1, i]) - + m_matrix[1, j, i] * + (m_matrix[0, j, i - 1] - + alf * m_matrix[3, j, i - 1])) + + m_matrix[4, j, i] = 1.0 / m_matrix[4, j, i] + + m_matrix[0, j, i] = ((a_matrix[0, j - 1, i - 1] - + alf * m_matrix[2, j, i] * + m_matrix[0, j - 1, i]) * + m_matrix[4, j, i]) + + m_matrix[3, j, i] = ((a_matrix[3, j - 1, i - 1] - + alf * m_matrix[1, j, i] * + m_matrix[3, j, i - 1]) * + m_matrix[4, j, i]) + + self.a_matrix = a_matrix + 
self.m_matrix = m_matrix + + def solve(self, max_iterations=1000): + """Solve equation for the source term. + + Bi-conjugate gradient stabilized numerical solver: van der + Vorst, H. A., 1992: Bi-cgstab: A fast and smoothly converging + variant of bi-cg for the solution of nonsymmetric linear + systems. SIAM Journal on Scientific and Statistical Computing, + https://doi.org/10.1137/0913035. + This solver implements the preconditioned Bi-CGSTAB algorithm, + described in page 638 of that paper. + """ + bbb = np.zeros(np.array(self.source.shape) + 2) + xxx = np.zeros(np.array(self.source.shape) + 2) + bbb[1:-1, 1:-1] = self.source + bbb = swap_bounds(bbb) + + sc_err = dot_prod(bbb, bbb) + + # Group some temporal variables + stv = { + 'alf': 1.0, + 'omg': 1.0, + 'nrm': 1.0, + 'rrr': bbb - self.calc_ax(xxx) + } + stv['crrr'] = stv['rrr'].copy() + + ppp = np.zeros(np.array(self.source.shape) + 2) + vvv = np.zeros(np.array(self.source.shape) + 2) + + iteration = 0 + while iteration < max_iterations: + rho = dot_prod(stv['rrr'], stv['crrr']) + + bet = (rho / stv['nrm']) * (stv['alf'] / stv['omg']) + + ttt = stv['rrr'] - bet * stv['omg'] * vvv + + sss = precon(ttt, self.m_matrix) + ppp = sss + bet * ppp + + vvv = self.calc_ax(ppp) + stv['nrm'] = dot_prod(stv['crrr'], vvv) + + stv['alf'] = rho / stv['nrm'] + sss = stv['rrr'] - stv['alf'] * vvv + + csss = precon(sss, self.m_matrix) + ttt = self.calc_ax(csss) + + stv['omg'] = dot_prod(ttt, sss) / dot_prod(ttt, ttt) + + xxx = xxx + stv['alf'] * ppp + stv['omg'] * csss + stv['rrr'] = sss - stv['omg'] * ttt + + stv['nrm'] = rho + + if abs(stv['omg']) < 1.0e-16: + self.logger.info('Terminating Poisson solver.') + break + + err = np.sqrt(dot_prod(stv['rrr'], stv['rrr']) / sc_err) + if err < self.tolerance: + self.logger.info('Poisson solver has converged.') + break + + iteration += 1 + + if iteration == max_iterations: + raise RuntimeError('Poisson solver has not converged.') + + self.energy_flux_potential = xxx + + def 
calc_meridional_heat_transport(self): + """Meridional heat transport of energy flux potential. + + Calculate of the meridional heat transport using the gradient of + the energy flux potential. Equation (11) in Pearce and Bodas- + Salcedo (2023). + """ + deltax = 2.0 * np.pi / self.source.shape[1] + deltay = np.pi / self.source.shape[0] + yvalues = np.arange(-0.5 * np.pi + 0.5 * deltay, 0.5 * np.pi, deltay) + grad_phi = np.gradient(self.energy_flux_potential, deltay, axis=0) + grad_phi = grad_phi[1:-1, 1:-1] + self.meridional_heat_transport = np.sum((grad_phi.T * + np.cos(yvalues) * + deltax).T, axis=1) + + def calc_ax(self, x_matrix): + """Matrix calculation of the Laplacian equation, LHS of Eq. + + (9) in Pearce and Bodas-Salcedo (2023). + """ + # Laplacian equation + src_shape = np.array(self.source.shape) + ax_matrix = np.zeros(src_shape + 2) + x_matrix = swap_bounds(x_matrix) + shape0, shape1 = src_shape + ax_matrix[1:shape0 + 1, 1:shape1 + 1] = ( + self.a_matrix[2, 0:shape0, 0:shape1] * + x_matrix[0:shape0, 1:shape1 + 1] + + self.a_matrix[1, 0:shape0, 0:shape1] * + x_matrix[1:shape0 + 1, 0:shape1] + + self.a_matrix[0, 0:shape0, 0:shape1] * + x_matrix[1:shape0 + 1, 2:shape1 + 2] + + self.a_matrix[3, 0:shape0, 0:shape1] * + x_matrix[2:shape0 + 2, 1:shape1 + 1] + + self.a_matrix[4, 0:shape0, 0:shape1] * + x_matrix[1:shape0 + 1, 1:shape1 + 1]) + ax_matrix = swap_bounds(ax_matrix) + return ax_matrix diff --git a/esmvaltool/diag_scripts/iht_toa/single_model_diagnostics.py b/esmvaltool/diag_scripts/iht_toa/single_model_diagnostics.py new file mode 100644 index 0000000000..fc917a8aa5 --- /dev/null +++ b/esmvaltool/diag_scripts/iht_toa/single_model_diagnostics.py @@ -0,0 +1,993 @@ +# (C) Crown Copyright 2023, the Met Office. +"""Single model diagnostics. + +Apply Poisson solver to input fluxes and produce plots. 
+""" + +import datetime +import logging +from copy import deepcopy + +import cartopy.crs as ccrs +import iris +import iris.plot as iplt +import matplotlib.dates as mdates +import matplotlib.pyplot as plt +import numpy as np +from iris import NameConstraint +from matplotlib import gridspec, rcParams +from poisson_solver import SphericalPoisson + +from esmvaltool.diag_scripts.shared import ( + group_metadata, + run_diagnostic, + save_figure, +) + +# Initialise logger +logger = logging.getLogger(__name__) + +rcParams.update({ + 'font.size': 14, + 'xtick.major.pad': 10, + 'ytick.major.pad': 10, + 'xtick.major.size': 10, + 'ytick.major.size': 10, + 'xtick.minor.size': 5, + 'ytick.minor.size': 5, + 'axes.linewidth': 2, + 'lines.markersize': 8, + 'lines.linewidth': 2 +}) + +# Figure captions +caption = { + 'F1': 'Figure 1. The implied heat transport due to TOA net flux (blue), ' + 'split into the contributions from SW (orange) and LW (green).', + 'F2': 'Figure 2. The TOA energy flux potentials for (a) TOT, (c) ' + 'SW, and (e) LW net fluxes, alongside maps of the spatial anomalies ' + 'of the input fluxes [(b),(d),(f)]. The implied heat transport is ' + 'the gradient of the energy flus potential, shown by the white ' + 'vector arrows (with the same magnitude scale across all subplots). ' + 'Heat is directed from the blue minima of the potential field to ' + 'yellow maxima, with the magnitude implied by the density of ' + 'contours. All maps of the same type share the same color bar at ' + 'the bottom of the column so that it is possible to directly ' + 'compare the results from different fluxes.', + 'F3': 'Figure 3. Direct radiative effects of clouds on the meridional ' + 'heat transport. (a) Contributions from TOT CRE (blue), SW CRE ' + '(orange), and LW CRE (green). (b) Contributions from all-sky and ' + 'clear-sky OSR. Both curves have been multiplied by -1 such that ' + 'positive heat transport is northward.', + 'F4': 'Figure 4. 
As in Figure 2, but for cloud radiative effects.', + 'F5': 'Figure 5. As in Figure 2, but for energy flux potentials and ' + 'spatial radiative anomalies associated with all-sky and clear-sky ' + 'outgoing shortwave radiation. ', + 'F6': 'Figure 6. A measure of the symmetry between heat transport in the ' + 'Northern and Southern Hemispheres, calculated for the 12-month ' + 'running mean of MHT in (a) the full hemisphere, (b) from the ' + 'equator to 30 deg latitude, and (c) between 30 and 90 deg ' + 'latitude. Symmetry values obtained when including (blue) and ' + 'excluding (orange) the effect of clouds are shown. The ' + 'climatological symmetry values for the two cases are shown as ' + 'black lines in each subplot. The standard deviations of the ' + 'time series are shown in each subplot.', +} + + +def get_provenance_record(filenames, figure_caption): + """Return a provenance record describing the plot. + + Parameters + ---------- + filenames : list of strings + The filenames containing the data used to create the plot. + figure_caption : string + Detailed description of the figure. + + Returns + ------- + dictionary + The provenance record describing the plot. + """ + record = { + 'ancestors': filenames, + 'caption': figure_caption, + 'references': ['pearce23jclim'] + } + return record + + +def matching_strings(list_of_strings, substrings): + """Return subset of ``list_of_strings`` with matches in ``substrings``. + + Parameters + ---------- + list_of_strings : list of strings + List of strings to be searched. + substrings : list of strings + The list of search strings. + + Returns + ------- + list + The elements in ``list_of_strings`` that contain + any of the substrings. + """ + matches = [] + for element in list_of_strings: + for var in substrings: + if var in element: + matches.append(element) + return matches + + +def area_average(cube, latitude='latitude', longitude='longitude', mdtol=1): + """Return area-weighted average of a cube. 
+ + Parameters + ---------- + cube : :class:`iris.cube.Cube` + Input cube. + latitude : string + Name of latitude coordinate in ``cube``. + longitude : string + Name of longitude coordinate in ``cube``. + mdtol : float + Tolerance to missing data, between 0 and 1. + + + Returns + ------- + :class:`iris.cube.Cube` + Collapsed cube with the weighted average. + """ + if cube.coord(latitude).bounds is None: + cube.coord(latitude).guess_bounds() + if cube.coord(longitude).bounds is None: + cube.coord(longitude).guess_bounds() + grid_areas = iris.analysis.cartography.area_weights(cube) + cube_avg = cube.collapsed([longitude, latitude], + iris.analysis.MEAN, + weights=grid_areas, + mdtol=mdtol) + return cube_avg + + +def weight_zm(cube, latitude=None): + """Weight zonal-mean by normalised gridbox areas. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + Input cube. + latitude : tuple + Four-element tuple defining the latitude range. + The last two elements must be False, e.g. + latitude=(-90, 0, False, False). + + Returns + ------- + :class:`numpy.array` + Zonal-mean in the selected latitude range, weighted + by the normalised areas. + """ + if cube.coord('latitude').bounds is None: + cube.coord('latitude').guess_bounds() + areas_data = iris.analysis.cartography.area_weights(cube, normalize=True) + cube_areas = iris.cube.Cube(areas_data, + long_name="normalised_area", + var_name="area", units="1", + dim_coords_and_dims=[(cube.coords()[0], 0)]) + if latitude is not None: + cube = cube.intersection(latitude=latitude) + cube_areas = cube_areas.intersection(latitude=latitude) + return cube.data * cube_areas.data + + +def call_poisson(flux_cube, latitude='latitude', longitude='longitude'): + """Call the Poisson solver with the data in ``flux_cube`` as source term. + + Return the energy flux potential and implied meridional heat transport + as cubes. + + Parameters + ---------- + flux_cube : :class:`iris.cube.Cube` + Input cube. 
+ latitude : string + Name of latitude coordinate in ``cube``. + longitude : string + Name of longitude coordinate in ``cube``. + + Returns + ------- + efp_cube: :class:`iris.cube.Cube` + Energy flux potential cube. + mht_cube: :class:`iris.cube.Cube` + Implied meridional heat transport associated + with the source flux field. + """ + earth_radius = 6371e3 # Earth's radius in m + if flux_cube.coord(latitude).bounds is None: + flux_cube.coord(latitude).guess_bounds() + if flux_cube.coord(longitude).bounds is None: + flux_cube.coord(longitude).guess_bounds() + + # Remove average of flux field to account for storage term + grid_areas = iris.analysis.cartography.area_weights(flux_cube) + data_mean = flux_cube.collapsed(["longitude", "latitude"], + iris.analysis.MEAN, + weights=grid_areas).data + data = flux_cube.data - data_mean + + logger.info("Calling spherical_poisson") + sphpo = SphericalPoisson(logger, + source=data * (earth_radius**2.0), + tolerance=2.0e-4) + sphpo.solve() + sphpo.calc_meridional_heat_transport() + logger.info("Ending spherical_poisson") + + # Energy flux potential + efp_cube = iris.cube.Cube(sphpo.energy_flux_potential[1:-1, 1:-1], + long_name=f"energy_flux_potential" + f"_of_{flux_cube.var_name}", + var_name=f"{flux_cube.var_name}_efp", + units='J s-1', + dim_coords_and_dims=[(flux_cube.coords()[0], 0), + (flux_cube.coords()[1], 1)]) + + # MHT data cube + collapsed_longitude = iris.coords.AuxCoord(180.0, + bounds=(0.0, 360.0), + long_name='longitude', + standard_name='longitude', + units='degrees') + dim_coords_and_dims = [(flux_cube.coord('latitude'), 0)] + aux_coords_and_dims = [(flux_cube.coord('time'), None), + (collapsed_longitude, None)] + mht_cube = iris.cube.Cube(sphpo.meridional_heat_transport, + long_name=f"meridional_heat_transport_of" + f"_{flux_cube.var_name}", + var_name=f"{flux_cube.var_name}_mht", + units='W', + dim_coords_and_dims=dim_coords_and_dims, + aux_coords_and_dims=aux_coords_and_dims) + return efp_cube, mht_cube + + 
+def symmetry_metric(cube): + """Calculate symmetry metrics for a zonal-mean cube. + + It returns the symmetry metric S, as defined in Pearce and + Bodas-Salcedo, JClim, 2023, for 3 regions: entire hemisphere, + tropics (0 to 30 deg latitude) and extratropics + (30 to 90 degrees latitude). Perfectly symmetrical latitude + bands give S=0. + + Parameters + ---------- + cube : :class:`iris.cube.Cube` + Input cube. + + Returns + ------- + hemisphere: float + Metric for the whole hemisphere. + tropics: float + Metric for the tropics. + extra_tropics: float + Metric for the extra-tropics. + """ + hemisphere = np.abs( + weight_zm(cube, latitude=(0, 90, False, False))[::-1] + + weight_zm(cube, latitude=(-90, 0, False, False))).sum() + tropics = np.abs( + weight_zm(cube, latitude=(0, 30, False, False))[::-1] + + weight_zm(cube, latitude=(-30, 0, False, False))).sum() + extra_tropics = np.abs( + weight_zm(cube, latitude=(30, 90, False, False))[::-1] + + weight_zm(cube, latitude=(-90, -30, False, False))).sum() + return hemisphere, tropics, extra_tropics + + +def format_plot(axes, label, title): + """Format plots in quiver panel. + + Parameters + ---------- + axes : :class:`matplotlib.axes.Axes` + Input axes. + label : string + Top-left plot label. + title : string + Plot title. + """ + axes.set_xticks(np.arange(-180, 190, 60)) + axes.set_xticklabels(['180', '120W', '60W', '0', '60E', '120E', '180']) + axes.set_yticks(np.arange(-90, 100, 30)) + axes.set_yticklabels(['90S', '60S', '30S', 'Eq', '30N', '60N', '90N']) + axes.annotate(label, xy=(0, 1.05), xycoords='axes fraction', color='k') + axes.set_title(title) + + +class ImpliedHeatTransport: + """Class that solves implied heat transport for an input dataset. 
+ + These are the physical meanings of the main acronyms + used in the variable names: + FLX: radiative flux + EFP: energy flux potential + MHT: meridional heat transport + """ + + def __init__(self, flx_files): + """Calculate all the diagnostics for all fluxes in ``flx_files``. + + Parameters + ---------- + flx_files : list + List of files with input data. + """ + self.flx_files = flx_files + + # Create cube lists for the different datasets + self.flx_clim = iris.cube.CubeList() + self.mht_clim = iris.cube.CubeList() + self.efp_clim = iris.cube.CubeList() + self.mht_rolling_mean = iris.cube.CubeList() + self.symmetry_metric = iris.cube.CubeList() + + # Calculate 12-month rolling means for time series. + self.flx_rolling_mean = iris.cube.CubeList() + for flx_file in flx_files: + flx = iris.load_cube(flx_file) + if len(flx.shape) == 3: + self.flx_rolling_mean.append( + flx.rolling_window('time', iris.analysis.MEAN, 12)) + else: + self.flx_clim.append(flx) + + # Compute derived fluxes + self.derived_fluxes() + + # Calculate Energy Flux Potential and Meridional Heat Transport + # for each flux component + self.compute_efp_and_mht() + + # Times series of MHT symmetry metric + self.mht_symmetry_metrics() + + self.print() + + def compute_efp_and_mht(self): + """Calculate Energy Flux Potential and meridional heat transport. + + Loop over input data and calculate EFP and MHT of the + climatologies of radiative fluxes and the 12-month + rolling means of radiative fluxes. + """ + # Loop over climatologies + for flx in self.flx_clim: + efp, mht = call_poisson(flx) + self.efp_clim.append(efp) + self.mht_clim.append(mht) + # Loop over rolling means + for flx_rm in self.flx_rolling_mean: + mht_series = iris.cube.CubeList() + for flx in flx_rm.slices_over('time'): + efp, mht = call_poisson(flx) + mht_series.append(mht) + # Append MHT rolling mean after merging time series. 
+ self.mht_rolling_mean.append(mht_series.merge_cube()) + + def derived_fluxes(self): + """Calculate derived radiative fluxes. + + rlnt_clim: climatology of net LW TOA + rtntcs_clim: climatology of clear-sky net TOA + rtntcs_rolling_mean: 12-month rolling mean of rtntcs + """ + # Derived TOA climatologies: rlnt_clim, rtntcs_clim + rlnt_clim = -1.0 * self.flx_clim.extract_cube( + NameConstraint(var_name="rlut")) + rlnt_clim.var_name = "rlnt" + rlnt_clim.long_name = "radiative_flux_of_rlnt" + self.flx_clim.append(rlnt_clim) + rtntcs_clim = (self.flx_clim.extract_cube( + NameConstraint(var_name="rsdt")) - + self.flx_clim.extract_cube( + NameConstraint(var_name="rsutcs")) - + self.flx_clim.extract_cube( + NameConstraint(var_name="rlutcs"))) + rtntcs_clim.var_name = "rtntcs" + rtntcs_clim.long_name = "radiative_flux_of_rtntcs" + self.flx_clim.append(rtntcs_clim) + # Annual rolling means clear-sky net total TOA + rtntcs_rolling_mean = (self.flx_rolling_mean.extract_cube( + NameConstraint(var_name="rsdt")) - + self.flx_rolling_mean.extract_cube( + NameConstraint(var_name="rsutcs")) - + self.flx_rolling_mean.extract_cube( + NameConstraint(var_name="rlutcs"))) + rtntcs_rolling_mean.var_name = "rtntcs" + rtntcs_rolling_mean.long_name = "radiative_flux_of_rtntcs" + self.flx_rolling_mean.append(rtntcs_rolling_mean) + + def print(self): + """Print variable names of all cubes in an IHT object.""" + logger.info("=== implied_heat_transport object ===") + logger.info(self.mht_clim) + info_message = "Long name: %s; Variable: %s." 
+ for climatology in self.mht_clim: + logger.info(info_message, + climatology.long_name, + climatology.var_name) + + logger.info(self.efp_clim) + for climatology in self.efp_clim: + logger.info(info_message, + climatology.long_name, + climatology.var_name) + + logger.info(self.flx_clim) + for climatology in self.flx_clim: + logger.info(info_message, + climatology.long_name, + climatology.var_name) + + logger.info(self.mht_rolling_mean) + for rolling_mean in self.mht_rolling_mean: + logger.info(info_message, + rolling_mean.long_name, + rolling_mean.var_name) + + logger.info(self.symmetry_metric) + for metric in self.symmetry_metric: + logger.info(info_message, + metric.long_name, + metric.var_name) + + logger.info(self.flx_files) + + def mht_symmetry_metrics(self): + """Calculate symmetry metrics. + + Produce 12-month rolling means for all monthly time series + of MHT. + """ + petaunit = 1.0e15 + for mht_series in self.mht_rolling_mean: + time_coord = mht_series.coord('time') + ntime = time_coord.shape[0] + hemisphere = np.zeros(ntime) + tropics = np.zeros(ntime) + extra_tropics = np.zeros(ntime) + for i in np.arange(ntime): + hemisphere[i], tropics[i], extra_tropics[i] = ( + symmetry_metric(mht_series[i])) + # Create the cubes for each metric + long_name = f"symmetry_hemisphere_of_{mht_series.long_name}" + var_name = f"s_hem_{mht_series.var_name}" + cube_h = iris.cube.Cube(hemisphere / petaunit, + long_name=long_name, + var_name=var_name, + units="PW", + dim_coords_and_dims=[(time_coord, 0)]) + long_name = f"symmetry_tropics_of_{mht_series.long_name}" + var_name = f"s_tro_{mht_series.var_name}" + cube_t = iris.cube.Cube(tropics / petaunit, + long_name=long_name, + var_name=var_name, + units="PW", + dim_coords_and_dims=[(time_coord, 0)]) + long_name = f"symmetry_extratropics_of_{mht_series.long_name}" + var_name = f"s_ext_{mht_series.var_name}" + cube_e = iris.cube.Cube(extra_tropics / petaunit, + long_name=long_name, + var_name=var_name, + units="PW", + 
dim_coords_and_dims=[(time_coord, 0)]) + self.symmetry_metric.append(cube_h) + self.symmetry_metric.append(cube_t) + self.symmetry_metric.append(cube_e) + + def mht_plot(self, var_names, legend_label, ylim=(-10, 10)): + """Produce a single multi-line plot of MHT components. + + MHT is presented in PW. Up to three variables are on each plot. + + Parameters + ---------- + var_names : list + Variable names to plot, e.g. ["rtnt_mht", "rsnt_mht"]. + legend_label : list + List of labels for each line. + ylim : tuple + y axis limits. + """ + plt.figure() + for i, vname in enumerate(var_names): + mht = self.mht_clim.extract_cube(NameConstraint(var_name=vname)) + mht.convert_units('PW') + plt.plot(mht.coord('latitude').points, + mht.data, + label=legend_label[i]) + plt.hlines(0, -90, 90, color='k', linestyles=':') + plt.vlines(0, -10, 10, color='k', linestyles=':') + plt.xlim(-90, 90) + plt.ylim(ylim[0], ylim[1]) + plt.xticks(np.arange(-90, 120, 30)) + plt.xlabel('Latitude') + plt.ylabel('MHT (PW)') + plt.legend() + plt.tight_layout() + + def cre_mht_plot(self, left, right, ylim=(-1.5, 1.5)): + """Produce two multiline plots of MHT components. + + Parameters + ---------- + left : dictionary + Dictionary with variable names and labels for + the LHS plot, e.g. + {'vname': ['netcre_mht', 'swcre_mht', 'lwcre_mht'], + 'legend': ['Net CRE', 'SW CRE', 'LW CRE']} + right : dictionary + As ``left`` but for the RHS plot + ylim : tuple + y axis limits. 
+ """ + plt.figure(figsize=(11, 5)) + ax1 = plt.subplot(121) + for i, vname in enumerate(left['vname']): + mht = self.mht_clim.extract_cube(NameConstraint(var_name=vname)) + mht.convert_units('PW') + ax1.plot(mht.coord('latitude').points, + mht.data, + label=left['legend'][i]) + ax1.axhline(0, color='k', ls=':') + ax1.axvline(0, color='k', ls=':') + ax1.set_xlim(-90, 90) + ax1.set_xticks(np.arange(-90, 120, 30)) + ax1.set_xlabel('Latitude') + ax1.set_ylim(ylim[0], ylim[1]) + ax1.set_ylabel('MHT (PW)') + ax1.annotate('(a)', + xy=(0.01, 0.95), + xycoords='axes fraction', + color='k') + plt.legend() + + ax2 = plt.subplot(122) + col = ['C3', 'C7'] + for i, vname in enumerate(right['vname']): + mht = self.mht_clim.extract_cube(NameConstraint(var_name=vname)) + mht.convert_units('PW') + ax2.plot(mht.coord('latitude').points, + -mht.data, + label=right['legend'][i], + color=col[i]) + ax2.axhline(0, color='k', ls=':') + ax2.axvline(0, color='k', ls=':') + ax2.set_xlim(-90, 90) + ax2.set_xticks(np.arange(-90, 120, 30)) + ax2.set_xlabel('Latitude') + ax2.set_ylim(ylim[0], ylim[1]) + ax2.set_ylabel('MHT (PW)') + ax2.annotate('(b)', + xy=(0.01, 0.95), + xycoords='axes fraction', + color='k') + plt.legend(loc='lower right') + plt.tight_layout() + + def quiver_start(self, ntot, step): + """Calculate start point for quiver plot. + + Parameters + ---------- + ntot : int + Total number of points of the full vector. + step : int + Sampling step. + """ + start = (ntot - 2 - ((ntot - 2) // step) * step) // 2 + return start + + def quiver_maps_data(self, vnames, change_sign): + """Obtain data for one row of plots. + + Parameters + ---------- + vnames : list + Two-element list with the names of the EFP and + flux variables. + change_sign : list + Two-element list of booleans to indicate if + the variable has to be plotted with the sign changed. 
+ """ + efp = self.efp_clim.extract_cube(NameConstraint(var_name=vnames[0])) + flx = self.flx_clim.extract_cube(NameConstraint(var_name=vnames[1])) + # The choice of origin for efp is arbitrary, + # we choose the unweighted mean. + efp = efp - efp.collapsed(efp.coords(), iris.analysis.MEAN) + flx = flx - area_average(flx) + if change_sign[0]: + efp = -efp + if change_sign[1]: + flx = -flx + efp.convert_units("PW") + v_component, u_component = np.gradient(efp.data) + u_component = u_component[1:-1, 1:-1] + v_component = v_component[1:-1, 1:-1] + return {'efp': efp, 'flx': flx, 'uuu': u_component, 'vvv': v_component} + + def quiver_subplot(self, dargs): + """Produce panel with maps of EFPs and fluxes. + + Plot figures with energy flux potential and gradient in the left-hand + column and the corresponding source term in the right-hand column. + + Parameters + ---------- + dargs : dictionary + Dictionary with variable names and plot configuration + data. + """ + mshgrd = np.meshgrid(self.flx_clim[0].coord('longitude').points, + self.flx_clim[0].coord('latitude').points) + nrows = len(dargs['var_name']) + # Calculate sampling for vector plot + dxy = [mshgrd[0].shape[1] // 20, mshgrd[0].shape[0] // 10] + startx = self.quiver_start(mshgrd[0].shape[1], dxy[0]) + starty = self.quiver_start(mshgrd[0].shape[0], dxy[1]) + + # Set grid layout depending on number of rows. + # Place figures every grid_step rows in the grid. 
+ grid_step = 7 + if nrows == 3: + plt.figure(figsize=(10, 10)) + grds = gridspec.GridSpec(22, 2) + grds.update(wspace=0.25, hspace=1.5) + elif nrows == 2: + plt.figure(figsize=(10, 6.5)) + grds = gridspec.GridSpec(15, 2) + grds.update(wspace=0.25, hspace=1.5) + elif nrows == 1: + plt.figure(figsize=(12, 4)) + grds = gridspec.GridSpec(8, 2) + grds.update(wspace=0.25, hspace=1.5) + + cbs = [] + for i in range(nrows): + data = self.quiver_maps_data(dargs['var_name'][i], + dargs['change_sign'][i]) + plt.subplot(grds[i * grid_step:(i * grid_step) + grid_step, 0], + projection=ccrs.PlateCarree(central_longitude=0)) + cbs.append( + iplt.contourf(data['efp'], + levels=np.linspace(dargs['vmin'], dargs['vmax'], + dargs['nlevs']))) + plt.gca().coastlines() + if i == 0: + qqq = plt.quiver(mshgrd[0][starty::dxy[1], startx::dxy[0]], + mshgrd[1][starty::dxy[1], startx::dxy[0]], + data['uuu'][starty::dxy[1], startx::dxy[0]], + data['vvv'][starty::dxy[1], startx::dxy[0]], + pivot='mid', + color='w', + width=0.005) + else: + plt.quiver(mshgrd[0][starty::dxy[1], startx::dxy[0]], + mshgrd[1][starty::dxy[1], startx::dxy[0]], + data['uuu'][starty::dxy[1], startx::dxy[0]], + data['vvv'][starty::dxy[1], startx::dxy[0]], + pivot='mid', + scale=qqq.scale, + color='w') + format_plot(plt.gca(), dargs['label'][i][0], dargs['title'][i][0]) + + plt.subplot(grds[i * grid_step:(i * grid_step) + grid_step, 1], + projection=ccrs.PlateCarree(central_longitude=0)) + cbs.append( + iplt.contourf(data['flx'], + levels=np.linspace(dargs['wmin'], dargs['wmax'], + dargs['nwlevs']), + cmap='RdBu_r')) + plt.gca().coastlines() + format_plot(plt.gca(), dargs['label'][i][1], dargs['title'][i][1]) + + plt.subplot(grds[-1, 0]) + plt.colorbar(cbs[0], + cax=plt.gca(), + orientation='horizontal', + label='Energy flux potential (PW)') + plt.subplot(grds[-1, 1]) + plt.colorbar(cbs[1], + cax=plt.gca(), + orientation='horizontal', + label=r'Flux (Wm$^{-2}$)', + ticks=np.linspace(dargs['wmin'], dargs['wmax'], + 
dargs['nwlevs'])[1::dargs['wlevstep']]) + + if nrows == 3: + plt.subplots_adjust(left=0.1, right=0.94, top=1.0, bottom=0.11) + elif nrows == 2: + plt.subplots_adjust(left=0.11, right=0.9, top=1.0, bottom=0.13) + elif nrows == 1: + plt.subplots_adjust(left=0.11, right=0.9, top=1.0, bottom=0.20) + + def plot_symmetry_time_series(self): + """Produce Figure 6. + + All-sky and clear-sky time series of the symmetry metrics for + three regions: globe, tropics and extra-tropics. + """ + var_list = [["s_hem_rtnt_mht", "s_hem_rtntcs_mht"], + ["s_tro_rtnt_mht", "s_tro_rtntcs_mht"], + ["s_ext_rtnt_mht", "s_ext_rtntcs_mht"]] + col = ['C0', 'C1'] + label = [ + r'Global: 0$^\mathrm{o}$ - 90$^\mathrm{o}$', + r'Tropics: 0$^\mathrm{o}$ - 30$^\mathrm{o}$', + r'Extratropics: 30$^\mathrm{o}$ - 90$^\mathrm{o}$' + ] + legend_label = ["TOA net all-sky", "TOA net clear-sky"] + + plt.figure(figsize=(6, 12)) + for count, (var_name_1, var_name_2) in enumerate(var_list): + yy0 = self.symmetry_metric.extract_cube( + NameConstraint(var_name=var_name_1)) + yy1 = self.symmetry_metric.extract_cube( + NameConstraint(var_name=var_name_2)) + axx = plt.subplot(3, 1, count + 1) + dtx = [ + datetime.datetime.strptime(str(cell[0]), '%Y-%m-%d %H:%M:%S') + for cell in yy0.coord('time').cells() + ] + plt.plot(dtx, yy0.data, lw=4, linestyle='-', label=legend_label[0]) + plt.plot(dtx, yy1.data, lw=4, linestyle='-', label=legend_label[1]) + axx.annotate(rf'$\sigma$: {np.std(yy0.data):5.3f}', (0.05, 0.55), + xycoords='axes fraction', + color=col[0]) + axx.annotate(rf'$\sigma$: {np.std(yy1.data):5.3f}', (0.05, 0.45), + xycoords='axes fraction', + color=col[1]) + axx.set_ylim(0, 0.8) + axx.set_ylabel(r'$S$ (PW)') + axx.xaxis.set_major_locator(mdates.YearLocator(3, month=1, day=1)) + axx.xaxis.set_major_formatter(mdates.DateFormatter('%Y')) + axx.xaxis.set_minor_locator(mdates.YearLocator()) + axx.set_title(label[count]) + if count == 0: + plt.legend(loc=5) + plt.tight_layout() + + +def efp_maps(iht, model, 
experiment, config): + """Produce Figures 2, 4, and 5. + + Parameters + ---------- + iht : :class: ImpliedHeatTransport + Object with the recipe datasets. + model : string + Model name. + experiment : string + Experiment name. + config : dict + The ESMValTool configuration. + """ + # Figure 2 + iht.quiver_subplot( + { + 'var_name': [['rtnt_efp', 'rtnt'], ['rsnt_efp', 'rsnt'], + ['rlnt_efp', 'rlnt']], + 'title': [['$P_{TOA}^{TOT}$', r'$\Delta F_{TOA}^{TOT}$'], + ['$P_{TOA}^{SW}$', r'$\Delta F_{TOA}^{SW}$'], + ['$P_{TOA}^{LW}$', r'$\Delta F_{TOA}^{LW}$']], + 'label': [['(a)', '(b)'], ['(c)', '(d)'], ['(e)', '(f)']], + 'change_sign': [[False, False], [False, False], [False, False]], + 'wmin': + -180, + 'wmax': + 180, + 'nwlevs': + 19, + 'wlevstep': + 4, + 'vmin': + -1.2, + 'vmax': + 1.2, + 'nlevs': + 11 + }) + flx_files = matching_strings(iht.flx_files, ['rtnt/', + 'rsut/', + 'rlut/']) + provenance_record = get_provenance_record(flx_files, caption['F2']) + figname = f"figure2_{model}_{experiment}" + save_figure(figname, provenance_record, config) + # Figure 4 + iht.quiver_subplot( + { + 'var_name': [['netcre_efp', 'netcre'], ['swcre_efp', 'swcre'], + ['lwcre_efp', 'lwcre']], + 'title': [['$P_{TOA}^{TOTCRE}$', r'$\Delta CRE_{TOA}^{TOT}$'], + ['$P_{TOA}^{SWCRE}$', r'$\Delta CRE_{TOA}^{SW}$'], + ['$P_{TOA}^{LWCRE}$', r'$\Delta CRE_{TOA}^{LW}$']], + 'label': [['(a)', '(b)'], ['(c)', '(d)'], ['(e)', '(f)']], + 'change_sign': [[False, False], [False, False], [False, False]], + 'wmin': + -60, + 'wmax': + 60, + 'nwlevs': + 13, + 'wlevstep': + 2, + 'vmin': + -0.3, + 'vmax': + 0.3, + 'nlevs': + 11 + }) + flx_files = matching_strings(iht.flx_files, ['netcre/', + 'swcre/', + 'lwcre/']) + provenance_record = get_provenance_record(flx_files, caption['F4']) + figname = f"figure4_{model}_{experiment}" + save_figure(figname, provenance_record, config) + # Figure 5 + iht.quiver_subplot( + { + 'var_name': [['rsutcs_efp', 'rsutcs'], ['rsut_efp', 'rsut']], + 'title': [['$P_{TOA}^{SWup, 
clr}$', + r'$\Delta F_{TOA}^{SWup, clr}$'], + ['$P_{TOA}^{SWup, all}$', + r'$\Delta F_{TOA}^{SWup, all}$']], + 'label': [['(a)', '(b)'], ['(c)', '(d)']], + 'change_sign': [[True, True], [True, True]], + 'wmin': -100, + 'wmax': 100, + 'nwlevs': 21, + 'wlevstep': 3, + 'vmin': -0.35, + 'vmax': 0.35, + 'nlevs': 11 + }) + flx_files = matching_strings(iht.flx_files, ['rsut/', 'rsutcs/']) + provenance_record = get_provenance_record(flx_files, caption['F5']) + figname = f"figure5_{model}_{experiment}" + save_figure(figname, provenance_record, config) + + +def mht_plots(iht, model, experiment, config): + """Produce Figures 1 and 3. + + Parameters + ---------- + iht : :class: ImpliedHeatTransport + Object with the recipe datasets. + model : string + Model name. + experiment : string + Experiment name. + config : dict + The ESMValTool configuration. + """ + # Figure 1 + iht.mht_plot(["rtnt_mht", "rsnt_mht", "rlnt_mht"], ['Net', 'SW', 'LW']) + flx_files = matching_strings(iht.flx_files, ['rtnt/', 'rsut/', 'rlut/']) + provenance_record = get_provenance_record(flx_files, caption['F1']) + figname = f"figure1_{model}_{experiment}" + save_figure(figname, provenance_record, config) + # Figure 3 + iht.cre_mht_plot( + { + 'vname': ['netcre_mht', 'swcre_mht', 'lwcre_mht'], + 'legend': ['Net CRE', 'SW CRE', 'LW CRE'] + }, { + 'vname': ['rsut_mht', 'rsutcs_mht'], + 'legend': ['-1 x OSR (all-sky)', '-1 x OSR (clear-sky)'] + }) + flx_files = matching_strings(iht.flx_files, ['netcre/', 'swcre/', 'lwcre/', + 'rsut/', 'rsutcs/']) + provenance_record = get_provenance_record(flx_files, caption['F3']) + figname = f"figure3_{model}_{experiment}" + save_figure(figname, provenance_record, config) + + +def symmetry_plots(iht, model, experiment, config): + """Produce Figure 6. + + Parameters + ---------- + iht : :class: ImpliedHeatTransport + Object with the recipe datasets. + model : string + Model name. + experiment : string + Experiment name. + config : dict + The ESMValTool configuration. 
+ """ + iht.plot_symmetry_time_series() + flx_files = matching_strings(iht.flx_files, ['rtnt_monthly', + 'rsutcs_monthly', + 'rlutcs_monthly', + 'rsdt_monthly']) + provenance_record = get_provenance_record(flx_files, caption['F6']) + figname = f"figure6_{model}_{experiment}" + save_figure(figname, provenance_record, config) + + +def plot_single_model_diagnostics(iht_dict, config): + """Produce plots for a single model and experiment. + + Parameters + ---------- + iht_dict : dict + iht_dict is a two-level dictionary: iht_dict[model][experiment] + config : dict + The ESMValTool configuration. + """ + for model, iht_model in iht_dict.items(): + logger.info("Plotting model: %s", model) + for experiment, iht_experiment in iht_model.items(): + logger.info("Plotting experiment: %s", experiment) + mht_plots(iht_experiment, model, experiment, config) + efp_maps(iht_experiment, model, experiment, config) + symmetry_plots(iht_experiment, model, experiment, config) + + +def main(config): + """Produce all the recipe's plots. + + Produce Figures 1 to 6 of Pearce and Bodas-Salcedo (2023) for each + model and dataset combination. + + Parameters + ---------- + config : dict + The ESMValTool configuration. + """ + input_data = deepcopy(list(config['input_data'].values())) + input_data = group_metadata(input_data, 'dataset', sort='variable_group') + + # Arrange input flux files in a 2-level dictionary [model_name][dataset] + flux_files = {} + for model_name, datasets in input_data.items(): + flux_files[model_name] = {} + for dataset in datasets: + if dataset['dataset'] in flux_files[model_name]: + flux_files[model_name][dataset['dataset']].append( + dataset['filename']) + else: + flux_files[model_name][dataset['dataset']] = [ + dataset['filename'] + ] + + # Create dictionary of implied_heat_transport objects. + # It's a 2-level dictionary like flux_files. + # This is where all the calculations are done. 
+ iht = {} + for model_name, datasets in flux_files.items(): + logger.info("Model %s", model_name) + iht[model_name] = {} + for dataset_name, files in datasets.items(): + logger.info("Dataset %s", dataset_name) + iht[model_name][dataset_name] = ImpliedHeatTransport(files) + + # Produce plots + plot_single_model_diagnostics(iht, config) + + +if __name__ == '__main__': + + with run_diagnostic() as configuration: + main(configuration) diff --git a/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl index df1ea2375a..e92d60ab43 100644 --- a/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl +++ b/esmvaltool/diag_scripts/ipcc_ar5/ch12_calc_IAV_for_stippandhatch.ncl @@ -410,7 +410,6 @@ begin ; collect meta-data nc_file = ncdf_outfile - plot_file = work_dir + "/None" caption = "Inter-annual variability based on piControl runs." statistics = ("var") domains = ("global") @@ -418,7 +417,7 @@ begin authors = (/"lorenz_ruth"/) references = (/"collins13ipcc"/) infiles = metadata_att_as_array(info_items, "filename") - log_provenance(nc_file, plot_file, caption, statistics, domains, \ + log_provenance(nc_file, "n/a", caption, statistics, domains, \ plot_types, authors, references, infiles) leave_msg(DIAG_SCRIPT, "") diff --git a/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py b/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py index 0d8b95ff84..d703c87a24 100644 --- a/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py +++ b/esmvaltool/diag_scripts/land_carbon_cycle/diag_global_turnover.py @@ -737,7 +737,12 @@ def main(diag_config): tau_global = ctotal_global / gpp_global tau_global.convert_units('yr') - global_tau_mod['global'][model_name] = float(tau_global.core_data()) + # since dask=2023.3 there is an issue with converting the core_data() + # to float; I have not managed to pinpoint the issue neither in dask + # nor in iris, 
since minimal test cases are not reproducing it + # this is a scalar cube so no big mem issue by realizing the data + # global_tau_mod['global'][model_name] = float(tau_global.core_data()) + global_tau_mod['global'][model_name] = float(tau_global.data) base_name_mod = ( 'global_{title}_{source_label}_' diff --git a/esmvaltool/diag_scripts/mder/select_for_mder.ncl b/esmvaltool/diag_scripts/mder/select_for_mder.ncl index 8c9198a8f6..d07cda657f 100644 --- a/esmvaltool/diag_scripts/mder/select_for_mder.ncl +++ b/esmvaltool/diag_scripts/mder/select_for_mder.ncl @@ -97,7 +97,7 @@ begin AUTHORS = (/"wenzel_sabrina", "schlund_manuel"/) DOMAIN = diag_script_info@domain PLOT_TYPE = "" - PLOT_FILE = "missing" + PLOT_FILE = "n/a" REFERENCES = (/"wenzel16jclim"/) ; Get reference dataset(s) diff --git a/esmvaltool/diag_scripts/mlr/custom_sklearn.py b/esmvaltool/diag_scripts/mlr/custom_sklearn.py index 2e403163cb..198cbdc376 100644 --- a/esmvaltool/diag_scripts/mlr/custom_sklearn.py +++ b/esmvaltool/diag_scripts/mlr/custom_sklearn.py @@ -86,6 +86,7 @@ _DEFAULT_TAGS = { + 'array_api_support': False, 'non_deterministic': False, 'requires_positive_X': False, 'requires_positive_y': False, diff --git a/esmvaltool/diag_scripts/monitor/monitor_base.py b/esmvaltool/diag_scripts/monitor/monitor_base.py index 135027f374..21dc159619 100644 --- a/esmvaltool/diag_scripts/monitor/monitor_base.py +++ b/esmvaltool/diag_scripts/monitor/monitor_base.py @@ -97,7 +97,9 @@ def __init__(self, config): ) plot_folder = plot_folder.replace('{plot_dir}', self.cfg[names.PLOT_DIR]) - self.plot_folder = os.path.abspath(plot_folder) + self.plot_folder = os.path.abspath( + os.path.expandvars(os.path.expanduser(plot_folder)) + ) self.plot_filename = config.get( 'plot_filename', '{plot_type}_{real_name}_{dataset}_{mip}_{exp}_{ensemble}') @@ -293,11 +295,7 @@ def get_plot_folder(self, var_info): 'real_name': self._real_name(var_info['variable_group']), **var_info } - folder = os.path.expandvars( - 
os.path.expanduser( - list(_replace_tags(self.plot_folder, info))[0] - ) - ) + folder = list(_replace_tags(self.plot_folder, info))[0] if self.plot_folder.startswith('/'): folder = '/' + folder if not os.path.isdir(folder): diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py index 6ac399652b..abfed90f9d 100644 --- a/esmvaltool/diag_scripts/monitor/multi_datasets.py +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -31,7 +31,7 @@ datasets need to be given on the same horizontal and vertical grid (you can use the preprocessors :func:`esmvalcore.preprocessor.regrid` and :func:`esmvalcore.preprocessor.extract_levels` for this). Input data - needs to be 2D with dimensions `latitude`, `height`/`air_pressure`. + needs to be 2D with dimensions `latitude`, `altitude`/`air_pressure`. .. warning:: @@ -42,7 +42,28 @@ - 1D profiles (plot type ``1d_profile``): for each variable separately, all datasets are plotted in one single figure. Input data needs to be 1D with - single dimension `height` / `air_pressure` + single dimension `altitude` / `air_pressure` + - Variable vs. latitude plot (plot type ``variable_vs_lat``): + for each variable separately, all datasets are plotted in one + single figure. Input data needs to be 1D with single + dimension `latitude`. + - Hovmoeller Z vs. time (plot type ``hovmoeller_z_vs_time``): for each + variable and dataset, an individual figure is plotted. If a reference + dataset is defined, also include this dataset and a bias plot into the + figure. Note that if a reference dataset is defined, all input datasets + need to be given on the same temporal and vertical grid (you can use + the preprocessors :func:`esmvalcore.preprocessor.regrid_time` and + :func:`esmvalcore.preprocessor.extract_levels` for this). Input data + needs to be 2D with dimensions `time`, `altitude`/`air_pressure`. + - Hovmoeller time vs. 
latitude or longitude (plot type + ``hovmoeller_time_vs_lat_or_lon``): for each variable and dataset, an + individual figure is plotted. If a reference dataset is defined, also + include this dataset and a bias plot into the figure. Note that if a + reference dataset is defined, all input datasets need to be given on the + same temporal and horizontal grid (you can use the preprocessors + :func:`esmvalcore.preprocessor.regrid_time` and + :func:`esmvalcore.preprocessor.regrid` for this). Input data + needs to be 2D with dimensions `time`, `latitude`/`longitude`. Author ------ @@ -60,12 +81,16 @@ figure_kwargs: dict, optional Optional keyword arguments for :func:`matplotlib.pyplot.figure`. By default, uses ``constrained_layout: true``. +group_variables_by: str, optional (default: 'short_name') + Facet which is used to create variable groups. For each variable group, an + individual plot is created. plots: dict, optional Plot types plotted by this diagnostic (see list above). Dictionary keys - must be ``timeseries``, ``annual_cycle``, ``map``, ``zonal_mean_profile`` - or ``1d_profile``. - Dictionary values are dictionaries used as options for the corresponding - plot. The allowed options for the different plot types are given below. + must be ``timeseries``, ``annual_cycle``, ``map``, ``zonal_mean_profile``, + ``1d_profile``, ``variable_vs_lat``, ``hovmoeller_z_vs_time``, + ``hovmoeller_time_vs_lat_or_lon``. Dictionary values are dictionaries used + as options for the corresponding plot. The allowed options for the + different plot types are given below. plot_filename: str, optional Filename pattern for the plots. Defaults to ``{plot_type}_{real_name}_{dataset}_{mip}_{exp}_{ensemble}``. @@ -119,6 +144,10 @@ ``{project}`` that vary between the different datasets will be transformed to something like ``ambiguous_project``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: '{short_name}'``, ``xlim: [0, 5]``. 
+time_format: str, optional (default: None) + :func:`~datetime.datetime.strftime` format string that is used to format + the time axis using :class:`matplotlib.dates.DateFormatter`. If ``None``, + use the default formatting imposed by the iris plotting function. Configuration options for plot type ``annual_cycle`` ---------------------------------------------------- @@ -196,13 +225,17 @@ ``plot_func``. String arguments can include facets in curly brackets which will be derived from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: - {vmin: 200, vmax: 250}``. + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). plot_kwargs_bias: dict, optional Optional keyword arguments for the plot function defined by ``plot_func`` for plotting biases. These keyword arguments update (and potentially overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect if no reference dataset is given. See option ``plot_kwargs`` for more - details. By default, uses ``cmap: bwr``. + details. By default, uses ``cmap: bwr`` and ``norm: centered``. projection: str, optional (default: 'Robinson') Projection used for the map plot. Needs to be a valid projection class of :mod:`cartopy.crs`. Keyword arguments can be specified using the option @@ -219,10 +252,9 @@ ``{project}``, ``{short_name}``, ``{exp}``. Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: '{short_name}'``, ``xlim: [0, 5]``. rasterize: bool, optional (default: True) - If ``True``, use `rasterization - `_ for - map plots to produce smaller files. This is only relevant for vector - graphics (e.g., ``output_file_type=pdf,svg,ps``). 
+ If ``True``, use rasterization_ for map plots to produce smaller files. + This is only relevant for vector graphics (e.g., ``output_file_type: + pdf,svg,ps``). show_stats: bool, optional (default: True) Show basic statistics on the plots. x_pos_stats_avg: float, optional (default: 0.0) @@ -270,7 +302,7 @@ plot_func: str, optional (default: 'contourf') Plot function used to plot the profiles. Must be a function of :mod:`iris.plot` that supports plotting of 2D cubes with coordinates - latitude and height/air_pressure. + latitude and altitude/air_pressure. plot_kwargs: dict, optional Optional keyword arguments for the plot function defined by ``plot_func``. Dictionary keys are elements identified by ``facet_used_for_labels`` or @@ -280,13 +312,17 @@ ``plot_func``. String arguments can include facets in curly brackets which will be derived from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: - {vmin: 200, vmax: 250}``. + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). plot_kwargs_bias: dict, optional Optional keyword arguments for the plot function defined by ``plot_func`` for plotting biases. These keyword arguments update (and potentially overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect if no reference dataset is given. See option ``plot_kwargs`` for more - details. By default, uses ``cmap: bwr``. + details. By default, uses ``cmap: bwr`` and ``norm: centered``. pyplot_kwargs: dict, optional Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as @@ -295,10 +331,9 @@ ``{project}``, ``{short_name}``, ``{exp}``. 
Examples: ``title: 'Awesome Plot of {long_name}'``, ``xlabel: '{short_name}'``, ``xlim: [0, 5]``. rasterize: bool, optional (default: True) - If ``True``, use `rasterization - `_ for - profile plots to produce smaller files. This is only relevant for vector - graphics (e.g., ``output_file_type=pdf,svg,ps``). + If ``True``, use rasterization_ for profile plots to produce smaller files. + This is only relevant for vector graphics (e.g., ``output_file_type: + pdf,svg,ps``). show_stats: bool, optional (default: True) Show basic statistics on the plots. show_y_minor_ticklabels: bool, optional (default: False) @@ -352,14 +387,209 @@ show_y_minor_ticklabels: bool, optional (default: False) Show tick labels for the minor ticks on the Y axis. +Configuration options for plot type ``variable_vs_lat`` +------------------------------------------------------- +gridline_kwargs: dict, optional + Optional keyword arguments for grid lines. By default, ``color: lightgrey, + alpha: 0.5`` are used. Use ``gridline_kwargs: false`` to not show grid + lines. +legend_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.legend`. Use + ``legend_kwargs: false`` to not show legends. +plot_kwargs: dict, optional + Optional keyword arguments for :func:`iris.plot.plot`. Dictionary keys are + elements identified by ``facet_used_for_labels`` or ``default``, e.g., + ``CMIP6`` if ``facet_used_for_labels: project`` or ``historical`` if + ``facet_used_for_labels: exp``. Dictionary values are dictionaries used as + keyword arguments for :func:`iris.plot.plot`. String arguments can include + facets in curly brackets which will be derived from the corresponding + dataset, e.g., ``{project}``, ``{short_name}``, ``{exp}``. Examples: + ``default: {linestyle: '-', label: '{project}'}, CMIP6: {color: red, + linestyle: '--'}, OBS: {color: black}``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. 
Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + single argument for these functions. String arguments can include facets in + curly brackets which will be derived from the datasets plotted in the + corresponding plot, e.g., ``{short_name}``, ``{exp}``. Facets like + ``{project}`` that vary between the different datasets will be transformed + to something like ``ambiguous_project``. Examples: ``title: 'Awesome Plot + of {long_name}'``, ``xlabel: '{short_name}'``, ``xlim: [0, 5]``. + +Configuration options for plot type ``hovmoeller_z_vs_time`` +------------------------------------------------------------ +cbar_label: str, optional (default: '{short_name} [{units}]') + Colorbar label. Can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. +cbar_label_bias: str, optional (default: 'Δ{short_name} [{units}]') + Colorbar label for plotting biases. Can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. This option has no effect if no reference + dataset is given. +cbar_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar`. By + default, uses ``orientation: vertical``. +cbar_kwargs_bias: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar` for + plotting biases. These keyword arguments update (and potentially overwrite) + the ``cbar_kwargs`` for the bias plot. This option has no effect if no + reference dataset is given. +common_cbar: bool, optional (default: False) + Use a common colorbar for the top panels (i.e., plots of the dataset and + the corresponding reference dataset) when using a reference dataset. 
If + neither ``vmin`` and ``vmix`` nor ``levels`` is given in ``plot_kwargs``, + the colorbar bounds are inferred from the dataset in the top left panel, + which might lead to an inappropriate colorbar for the reference dataset + (top right panel). Thus, the use of the ``plot_kwargs`` ``vmin`` and + ``vmax`` or ``levels`` is highly recommend when using this ``common_cbar: + true``. This option has no effect if no reference dataset is given. +fontsize: int, optional (default: 10) + Fontsize used for ticks, labels and titles. For the latter, use the given + fontsize plus 2. Does not affect suptitles. +log_y: bool, optional (default: True) + Use logarithmic Y-axis. +plot_func: str, optional (default: 'contourf') + Plot function used to plot the profiles. Must be a function of + :mod:`iris.plot` that supports plotting of 2D cubes with coordinates + latitude and altitude/air_pressure. +plot_kwargs: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func``. + Dictionary keys are elements identified by ``facet_used_for_labels`` or + ``default``, e.g., ``CMIP6`` if ``facet_used_for_labels: project`` or + ``historical`` if ``facet_used_for_labels: exp``. Dictionary values are + dictionaries used as keyword arguments for the plot function defined by + ``plot_func``. String arguments can include facets in curly brackets which + will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). +plot_kwargs_bias: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func`` + for plotting biases. 
These keyword arguments update (and potentially + overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect + if no reference dataset is given. See option ``plot_kwargs`` for more + details. By default, uses ``cmap: bwr`` and ``norm: centered``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + single argument for these functions. String arguments can include facets in + curly brackets which will be derived from the corresponding dataset, e.g., + ``{project}``, ``{short_name}``, ``{exp}``. Examples: ``title: 'Awesome + Plot of {long_name}'``, ``xlabel: '{short_name}'``, ``xlim: [0, 5]``. +rasterize: bool, optional (default: True) + If ``True``, use rasterization_ for profile plots to produce smaller files. + This is only relevant for vector graphics (e.g., ``output_file_type: + pdf,svg,ps``). +show_stats: bool, optional (default: True) + Show basic statistics on the plots. +show_y_minor_ticklabels: bool, optional (default: False) + Show tick labels for the minor ticks on the Y axis. +x_pos_stats_avg: float, optional (default: 0.01) + Text x-position of average (shown on the left) in Axes coordinates. Can be + adjusted to avoid overlap with the figure. Only relevant if ``show_stats: + true``. +x_pos_stats_bias: float, optional (default: 0.7) + Text x-position of bias statistics (shown on the right) in Axes + coordinates. Can be adjusted to avoid overlap with the figure. Only + relevant if ``show_stats: true``. +time_format: str, optional (default: None) + :func:`~datetime.datetime.strftime` format string that is used to format + the time axis using :class:`matplotlib.dates.DateFormatter`. If ``None``, + use the default formatting imposed by the iris plotting function. 
+ +Configuration options for plot type ``hovmoeller_time_vs_lat_or_lon`` +--------------------------------------------------------------------- +cbar_label: str, optional (default: '{short_name} [{units}]') + Colorbar label. Can include facets in curly brackets which will be derived + from the corresponding dataset, e.g., ``{project}``, ``{short_name}``, + ``{exp}``. +cbar_label_bias: str, optional (default: 'Δ{short_name} [{units}]') + Colorbar label for plotting biases. Can include facets in curly brackets + which will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. This option has no effect if no reference + dataset is given. +cbar_kwargs: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar`. By + default, uses ``orientation: vertical``. +cbar_kwargs_bias: dict, optional + Optional keyword arguments for :func:`matplotlib.pyplot.colorbar` for + plotting biases. These keyword arguments update (and potentially overwrite) + the ``cbar_kwargs`` for the bias plot. This option has no effect if no + reference dataset is given. +common_cbar: bool, optional (default: False) + Use a common colorbar for the top panels (i.e., plots of the dataset and + the corresponding reference dataset) when using a reference dataset. If + neither ``vmin`` and ``vmix`` nor ``levels`` is given in ``plot_kwargs``, + the colorbar bounds are inferred from the dataset in the top left panel, + which might lead to an inappropriate colorbar for the reference dataset + (top right panel). Thus, the use of the ``plot_kwargs`` ``vmin`` and + ``vmax`` or ``levels`` is highly recommend when using this ``common_cbar: + true``. This option has no effect if no reference dataset is given. +fontsize: int, optional (default: 10) + Fontsize used for ticks, labels and titles. For the latter, use the given + fontsize plus 2. Does not affect suptitles. 
+plot_func: str, optional (default: 'contourf') + Plot function used to plot the profiles. Must be a function of + :mod:`iris.plot` that supports plotting of 2D cubes with coordinates + latitude and height/air_pressure. +plot_kwargs: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func``. + Dictionary keys are elements identified by ``facet_used_for_labels`` or + ``default``, e.g., ``CMIP6`` if ``facet_used_for_labels: project`` or + ``historical`` if ``facet_used_for_labels: exp``. Dictionary values are + dictionaries used as keyword arguments for the plot function defined by + ``plot_func``. String arguments can include facets in curly brackets which + will be derived from the corresponding dataset, e.g., ``{project}``, + ``{short_name}``, ``{exp}``. Examples: ``default: {levels: 2}, CMIP6: + {vmin: 200, vmax: 250}``. In addition to the normalization_ options + supported by the plot function, the option ``norm: centered`` can be + specified. In this case, the keywords ``vcenter`` and ``halfrange`` should + be used instead of ``vmin`` or ``vmax`` (see + :class:`~matplotlib.colors.CenteredNorm`). +plot_kwargs_bias: dict, optional + Optional keyword arguments for the plot function defined by ``plot_func`` + for plotting biases. These keyword arguments update (and potentially + overwrite) the ``plot_kwargs`` for the bias plot. This option has no effect + if no reference dataset is given. See option ``plot_kwargs`` for more + details. By default, uses ``cmap: bwr`` and ``norm: centered``. +pyplot_kwargs: dict, optional + Optional calls to functions of :mod:`matplotlib.pyplot`. Dictionary keys + are functions of :mod:`matplotlib.pyplot`. Dictionary values are used as + single argument for these functions. String arguments can include facets in + curly brackets which will be derived from the corresponding dataset, e.g., + ``{project}``, ``{short_name}``, ``{exp}``. 
Examples: ``title: 'Awesome + Plot of {long_name}'``, ``xlabel: '{short_name}'``, ``xlim: [0, 5]``. +rasterize: bool, optional (default: True) + If ``True``, use rasterization_ for profile plots to produce smaller files. + This is only relevant for vector graphics (e.g., ``output_file_type: + pdf,svg,ps``). +show_y_minor_ticks: bool, optional (default: True) + Show minor ticks for time on the Y axis. +show_x_minor_ticks: bool, optional (default: True) + Show minor ticks for latitude or longitude on the X axis. +time_format: str, optional (default: None) + :func:`~datetime.datetime.strftime` format string that is used to format + the time axis using :class:`matplotlib.dates.DateFormatter`. If ``None``, + use the default formatting imposed by the iris plotting function. + .. hint:: Extra arguments given to the recipe are ignored, so it is safe to use yaml anchors to share the configuration of common arguments with other monitor diagnostic script. +.. _rasterization: https://matplotlib.org/stable/gallery/misc/ + rasterization_demo.html +.. 
_normalization: https://matplotlib.org/stable/users/explain/colors/ + colormapnorms.html + """ import logging +import warnings from copy import deepcopy from pathlib import Path from pprint import pformat @@ -367,14 +597,21 @@ import cartopy.crs as ccrs import iris import matplotlib as mpl +import matplotlib.dates as mdates import matplotlib.pyplot as plt import numpy as np import seaborn as sns from iris.analysis.cartography import area_weights from iris.coord_categorisation import add_year from iris.coords import AuxCoord +from matplotlib.colors import CenteredNorm from matplotlib.gridspec import GridSpec -from matplotlib.ticker import FormatStrFormatter, LogLocator, NullFormatter +from matplotlib.ticker import ( + AutoMinorLocator, + FormatStrFormatter, + LogLocator, + NullFormatter, +) from sklearn.metrics import r2_score import esmvaltool.diag_scripts.shared.iris_helpers as ih @@ -397,16 +634,19 @@ def __init__(self, config): """Initialize class member.""" super().__init__(config) - # Get default stettings + # Get default settings self.cfg = deepcopy(self.cfg) self.cfg.setdefault('facet_used_for_labels', 'dataset') self.cfg.setdefault('figure_kwargs', {'constrained_layout': True}) + self.cfg.setdefault('group_variables_by', 'short_name') self.cfg.setdefault('savefig_kwargs', { 'bbox_inches': 'tight', 'dpi': 300, 'orientation': 'landscape', }) self.cfg.setdefault('seaborn_settings', {'style': 'ticks'}) + logger.info("Using facet '%s' to group variables", + self.cfg['group_variables_by']) logger.info("Using facet '%s' to create labels", self.cfg['facet_used_for_labels']) @@ -414,7 +654,7 @@ def __init__(self, config): self.input_data = self._load_and_preprocess_data() self.grouped_input_data = group_metadata( self.input_data, - 'short_name', + self.cfg['group_variables_by'], sort=self.cfg['facet_used_for_labels'], ) @@ -436,7 +676,10 @@ def __init__(self, config): 'annual_cycle', 'map', 'zonal_mean_profile', - '1d_profile' + '1d_profile', + 'variable_vs_lat', + 
'hovmoeller_z_vs_time', + 'hovmoeller_time_vs_lat_or_lon', ] for (plot_type, plot_options) in self.plots.items(): if plot_type not in self.supported_plot_types: @@ -453,14 +696,15 @@ def __init__(self, config): self.plots[plot_type].setdefault('legend_kwargs', {}) self.plots[plot_type].setdefault('plot_kwargs', {}) self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('time_format', None) - if plot_type == 'annual_cycle': + elif plot_type == 'annual_cycle': self.plots[plot_type].setdefault('gridline_kwargs', {}) self.plots[plot_type].setdefault('legend_kwargs', {}) self.plots[plot_type].setdefault('plot_kwargs', {}) self.plots[plot_type].setdefault('pyplot_kwargs', {}) - if plot_type == 'map': + elif plot_type == 'map': self.plots[plot_type].setdefault( 'cbar_label', '{short_name} [{units}]') self.plots[plot_type].setdefault( @@ -478,6 +722,9 @@ def __init__(self, config): self.plots[plot_type]['plot_kwargs_bias'].setdefault( 'cmap', 'bwr' ) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) if 'projection' not in self.plots[plot_type]: self.plots[plot_type].setdefault('projection', 'Robinson') self.plots[plot_type].setdefault( @@ -491,7 +738,7 @@ def __init__(self, config): self.plots[plot_type].setdefault('x_pos_stats_avg', 0.0) self.plots[plot_type].setdefault('x_pos_stats_bias', 0.92) - if plot_type == 'zonal_mean_profile': + elif plot_type == 'zonal_mean_profile': self.plots[plot_type].setdefault( 'cbar_label', '{short_name} [{units}]') self.plots[plot_type].setdefault( @@ -509,6 +756,9 @@ def __init__(self, config): self.plots[plot_type]['plot_kwargs_bias'].setdefault( 'cmap', 'bwr' ) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) self.plots[plot_type].setdefault('pyplot_kwargs', {}) self.plots[plot_type].setdefault('rasterize', True) self.plots[plot_type].setdefault('show_stats', True) @@ -518,7 +768,7 @@ def __init__(self, config): 
self.plots[plot_type].setdefault('x_pos_stats_avg', 0.01) self.plots[plot_type].setdefault('x_pos_stats_bias', 0.7) - if plot_type == '1d_profile': + elif plot_type == '1d_profile': self.plots[plot_type].setdefault('aspect_ratio', 1.5) self.plots[plot_type].setdefault('gridline_kwargs', {}) self.plots[plot_type].setdefault('legend_kwargs', {}) @@ -529,6 +779,69 @@ def __init__(self, config): self.plots[plot_type].setdefault( 'show_y_minor_ticklabels', False ) + elif plot_type == 'variable_vs_lat': + self.plots[plot_type].setdefault('gridline_kwargs', {}) + self.plots[plot_type].setdefault('legend_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + + elif plot_type == 'hovmoeller_z_vs_time': + self.plots[plot_type].setdefault('cbar_label', + '{short_name} [{units}]') + self.plots[plot_type].setdefault('cbar_label_bias', + 'Δ{short_name} [{units}]') + self.plots[plot_type].setdefault('cbar_kwargs', + {'orientation': 'vertical'}) + self.plots[plot_type].setdefault('cbar_kwargs_bias', {}) + self.plots[plot_type].setdefault('common_cbar', False) + self.plots[plot_type].setdefault('fontsize', 10) + self.plots[plot_type].setdefault('log_y', True) + self.plots[plot_type].setdefault('plot_func', 'contourf') + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs_bias', {}) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'cmap', 'bwr') + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('rasterize', True) + self.plots[plot_type].setdefault('show_stats', True) + self.plots[plot_type].setdefault('show_y_minor_ticklabels', + False) + self.plots[plot_type].setdefault('time_format', None) + self.plots[plot_type].setdefault('x_pos_stats_avg', 0.01) + self.plots[plot_type].setdefault('x_pos_stats_bias', 0.7) + + elif plot_type == 
'hovmoeller_time_vs_lat_or_lon': + self.plots[plot_type].setdefault( + 'cbar_label', '{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_label_bias', 'Δ{short_name} [{units}]') + self.plots[plot_type].setdefault( + 'cbar_kwargs', {'orientation': 'vertical'} + ) + self.plots[plot_type].setdefault('cbar_kwargs_bias', {}) + self.plots[plot_type].setdefault('common_cbar', False) + self.plots[plot_type].setdefault('fontsize', 10) + self.plots[plot_type].setdefault('plot_func', 'contourf') + self.plots[plot_type].setdefault('plot_kwargs', {}) + self.plots[plot_type].setdefault('plot_kwargs_bias', {}) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'cmap', 'bwr' + ) + self.plots[plot_type]['plot_kwargs_bias'].setdefault( + 'norm', 'centered' + ) + self.plots[plot_type].setdefault('pyplot_kwargs', {}) + self.plots[plot_type].setdefault('rasterize', True) + self.plots[plot_type].setdefault( + 'show_y_minor_ticks', True + ) + self.plots[plot_type].setdefault( + 'show_x_minor_ticks', True + ) + self.plots[plot_type].setdefault('time_format', None) # Check that facet_used_for_labels is present for every dataset for dataset in self.input_data: @@ -587,10 +900,10 @@ def _add_stats(self, plot_type, axes, dim_coords, dataset, # Different options for the different plots types fontsize = 6.0 y_pos = 0.95 - if plot_type == 'map': - x_pos_bias = self.plots[plot_type]['x_pos_stats_bias'] - x_pos = self.plots[plot_type]['x_pos_stats_avg'] - elif plot_type in ['zonal_mean_profile']: + if all([ + 'x_pos_stats_avg' in self.plots[plot_type], + 'x_pos_stats_bias' in self.plots[plot_type], + ]): x_pos_bias = self.plots[plot_type]['x_pos_stats_bias'] x_pos = self.plots[plot_type]['x_pos_stats_avg'] else: @@ -757,9 +1070,17 @@ def _get_plot_kwargs(self, plot_type, dataset, bias=False): plot_kwargs[key] = val # Default settings for different plot types - if plot_type in ('timeseries', 'annual_cycle', '1d_profile'): + if plot_type in ('timeseries', 'annual_cycle', 
'1d_profile', + 'variable_vs_lat'): plot_kwargs.setdefault('label', label) + if plot_kwargs.get('norm') == 'centered': + norm = CenteredNorm( + vcenter=plot_kwargs.pop('vcenter', 0.0), + halfrange=plot_kwargs.pop('halfrange', None), + ) + plot_kwargs['norm'] = norm + return deepcopy(plot_kwargs) def _load_and_preprocess_data(self): @@ -822,6 +1143,7 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): axes_data.gridlines(**gridline_kwargs) axes_data.set_title(self._get_label(dataset), pad=3.0) self._add_stats(plot_type, axes_data, dim_coords_dat, dataset) + self._process_pyplot_kwargs(plot_type, dataset) # Plot reference dataset (top right) # Note: make sure to use the same vmin and vmax than the top left @@ -838,6 +1160,7 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): axes_ref.gridlines(**gridline_kwargs) axes_ref.set_title(self._get_label(ref_dataset), pad=3.0) self._add_stats(plot_type, axes_ref, dim_coords_ref, ref_dataset) + self._process_pyplot_kwargs(plot_type, ref_dataset) # Add colorbar(s) self._add_colorbar(plot_type, plot_data, plot_ref, axes_data, @@ -981,6 +1304,7 @@ def _plot_zonal_mean_profile_with_ref(self, plot_func, dataset, else: axes_data.get_yaxis().set_minor_formatter(NullFormatter()) self._add_stats(plot_type, axes_data, dim_coords_dat, dataset) + self._process_pyplot_kwargs(plot_type, dataset) # Plot reference dataset (top right) # Note: make sure to use the same vmin and vmax than the top left @@ -995,6 +1319,7 @@ def _plot_zonal_mean_profile_with_ref(self, plot_func, dataset, axes_ref.set_title(self._get_label(ref_dataset), pad=3.0) plt.setp(axes_ref.get_yticklabels(), visible=False) self._add_stats(plot_type, axes_ref, dim_coords_ref, ref_dataset) + self._process_pyplot_kwargs(plot_type, ref_dataset) # Add colorbar(s) self._add_colorbar(plot_type, plot_data, plot_ref, axes_data, @@ -1105,49 +1430,401 @@ def _plot_zonal_mean_profile_without_ref(self, plot_func, dataset): return (plot_path, {netcdf_path: 
cube}) - def _process_pyplot_kwargs(self, plot_type, dataset): - """Process functions for :mod:`matplotlib.pyplot`.""" - pyplot_kwargs = self.plots[plot_type]['pyplot_kwargs'] - for (func, arg) in pyplot_kwargs.items(): - if isinstance(arg, str): - arg = self._fill_facet_placeholders( - arg, - dataset, - f"pyplot_kwargs of {plot_type} '{func}: {arg}'", - ) - if arg is None: - getattr(plt, func)() - else: - getattr(plt, func)(arg) - - @staticmethod - def _check_cube_dimensions(cube, plot_type): - """Check that cube has correct dimensional variables.""" - expected_dimensions_dict = { - 'annual_cycle': (['month_number'],), - 'map': (['latitude', 'longitude'],), - 'zonal_mean_profile': (['latitude', 'air_pressure'], - ['latitude', 'altitude']), - 'timeseries': (['time'],), - '1d_profile': (['air_pressure'], - ['altitude']), + def _plot_hovmoeller_z_vs_time_without_ref(self, plot_func, dataset): + """Plot Hovmoeller Z vs. time for single dataset without reference.""" + plot_type = 'hovmoeller_z_vs_time' + logger.info( + "Plotting Hovmoeller Z vs. 
time without reference dataset" + " for '%s'", self._get_label(dataset)) - } - if plot_type not in expected_dimensions_dict: - raise NotImplementedError(f"plot_type '{plot_type}' not supported") - expected_dimensions = expected_dimensions_dict[plot_type] - for dims in expected_dimensions: - cube_dims = [cube.coords(dim, dim_coords=True) for dim in dims] - if all(cube_dims) and cube.ndim == len(dims): - return dims - expected_dims_str = ' or '.join( - [str(dims) for dims in expected_dimensions] - ) - raise ValueError( - f"Expected cube that exactly has the dimensional coordinates " - f"{expected_dims_str}, got {cube.summary(shorten=True)}") + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) - @staticmethod + # Create plot with desired settings + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + plot_hovmoeller = plot_func(cube, **plot_kwargs) + + # Print statistics if desired + self._add_stats(plot_type, axes, dim_coords_dat, dataset) + + # Setup colorbar + fontsize = self.plots[plot_type]['fontsize'] + colorbar = fig.colorbar(plot_hovmoeller, + ax=axes, + **self._get_cbar_kwargs(plot_type)) + colorbar.set_label(self._get_cbar_label(plot_type, dataset), + fontsize=fontsize) + colorbar.ax.tick_params(labelsize=fontsize) + + # Customize plot + axes.set_title(self._get_label(dataset)) + fig.suptitle(f"{dataset['long_name']} ({dataset['start_year']}-" + f"{dataset['end_year']})") + z_coord = cube.coord(axis='Z') + axes.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]') + if self.plots[plot_type]['log_y']: + axes.set_yscale('log') + axes.get_yaxis().set_major_formatter( + FormatStrFormatter('%.1f')) + if self.plots[plot_type]['show_y_minor_ticklabels']: + axes.get_yaxis().set_minor_formatter( + 
FormatStrFormatter('%.1f')) + else: + axes.get_yaxis().set_minor_formatter(NullFormatter()) + if self.plots[plot_type]['time_format'] is not None: + axes.get_xaxis().set_major_formatter( + mdates.DateFormatter(self.plots[plot_type]['time_format'])) + axes.set_xlabel('time') + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + + return (plot_path, {netcdf_path: cube}) + + def _plot_hovmoeller_z_vs_time_with_ref(self, plot_func, dataset, + ref_dataset): + """Plot Hovmoeller Z vs. time for single dataset with reference.""" + plot_type = 'hovmoeller_z_vs_time' + logger.info( + "Plotting Hovmoeller z vs. time with reference dataset" + " '%s' for '%s'", self._get_label(ref_dataset), + self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + ref_cube = ref_dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + dim_coords_ref = self._check_cube_dimensions(ref_cube, plot_type) + + # Create single figure with multiple axes + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + gridspec = GridSpec(5, + 4, + figure=fig, + height_ratios=[1.0, 1.0, 0.4, 1.0, 1.0]) + + # Options used for all subplots + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + fontsize = self.plots[plot_type]['fontsize'] + + # Plot dataset (top left) + axes_data = fig.add_subplot(gridspec[0:2, 0:2]) + plot_kwargs['axes'] = axes_data + plot_data = plot_func(cube, **plot_kwargs) + axes_data.set_title(self._get_label(dataset), pad=3.0) + z_coord = cube.coord(axis='Z') + axes_data.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]') + if self.plots[plot_type]['log_y']: + axes_data.set_yscale('log') + 
axes_data.get_yaxis().set_major_formatter( + FormatStrFormatter('%.1f')) + if self.plots[plot_type]['show_y_minor_ticklabels']: + axes_data.get_yaxis().set_minor_formatter( + FormatStrFormatter('%.1f')) + else: + axes_data.get_yaxis().set_minor_formatter(NullFormatter()) + if self.plots[plot_type]['time_format'] is not None: + axes_data.get_xaxis().set_major_formatter( + mdates.DateFormatter(self.plots[plot_type]['time_format'])) + self._add_stats(plot_type, axes_data, dim_coords_dat, dataset) + self._process_pyplot_kwargs(plot_type, dataset) + + # Plot reference dataset (top right) + # Note: make sure to use the same vmin and vmax than the top left + # plot if a common colorbar is desired + axes_ref = fig.add_subplot(gridspec[0:2, 2:4], + sharex=axes_data, + sharey=axes_data) + plot_kwargs['axes'] = axes_ref + if self.plots[plot_type]['common_cbar']: + plot_kwargs.setdefault('vmin', plot_data.get_clim()[0]) + plot_kwargs.setdefault('vmax', plot_data.get_clim()[1]) + plot_ref = plot_func(ref_cube, **plot_kwargs) + axes_ref.set_title(self._get_label(ref_dataset), pad=3.0) + plt.setp(axes_ref.get_yticklabels(), visible=False) + self._add_stats(plot_type, axes_ref, dim_coords_ref, ref_dataset) + self._process_pyplot_kwargs(plot_type, ref_dataset) + + # Add colorbar(s) + self._add_colorbar(plot_type, plot_data, plot_ref, axes_data, + axes_ref, dataset, ref_dataset) + + # Plot bias (bottom center) + bias_cube = cube - ref_cube + axes_bias = fig.add_subplot(gridspec[3:5, 1:3], + sharex=axes_data, + sharey=axes_data) + plot_kwargs_bias = self._get_plot_kwargs(plot_type, + dataset, + bias=True) + plot_kwargs_bias['axes'] = axes_bias + plot_bias = plot_func(bias_cube, **plot_kwargs_bias) + axes_bias.set_title( + f"{self._get_label(dataset)} - {self._get_label(ref_dataset)}", + pad=3.0, + ) + axes_bias.set_xlabel('time') + axes_bias.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]') + cbar_kwargs_bias = self._get_cbar_kwargs(plot_type, bias=True) + cbar_bias = 
fig.colorbar(plot_bias, + ax=axes_bias, + **cbar_kwargs_bias) + cbar_bias.set_label( + self._get_cbar_label(plot_type, dataset, bias=True), + fontsize=fontsize, + ) + cbar_bias.ax.tick_params(labelsize=fontsize) + self._add_stats(plot_type, axes_bias, dim_coords_dat, dataset, + ref_dataset) + + # Customize plot + fig.suptitle(f"{dataset['long_name']} ({dataset['start_year']}-" + f"{dataset['end_year']})") + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes_data, axes_ref, axes_bias]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = (get_diagnostic_filename( + Path(plot_path).stem + "_{pos}", self.cfg)) + netcdf_paths = { + netcdf_path.format(pos='top_left'): cube, + netcdf_path.format(pos='top_right'): ref_cube, + netcdf_path.format(pos='bottom'): bias_cube, + } + + return (plot_path, netcdf_paths) + + def _plot_hovmoeller_time_vs_lat_or_lon_with_ref(self, plot_func, dataset, + ref_dataset): + """Plot the hovmoeller profile for single dataset with reference.""" + plot_type = 'hovmoeller_time_vs_lat_or_lon' + logger.info("Plotting Hovmoeller plots with reference dataset" + " '%s' for '%s'", + self._get_label(ref_dataset), self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + ref_cube = ref_dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + self._check_cube_dimensions(ref_cube, plot_type) + + # Create single figure with multiple axes + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + gridspec = GridSpec(5, 4, figure=fig, + height_ratios=[1.0, 1.0, 0.4, 1.0, 1.0]) + + # Options used for all subplots + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + fontsize = self.plots[plot_type]['fontsize'] + + # Plot dataset (top left) + axes_data = fig.add_subplot(gridspec[0:2, 0:2]) + 
plot_kwargs['axes'] = axes_data + coord_names = [coord[0].name() for coord in cube.dim_coords] + if coord_names[0] == "time": + coord_names.reverse() + plot_kwargs['coords'] = coord_names + plot_data = plot_func(cube, **plot_kwargs) + axes_data.set_title(self._get_label(dataset), pad=3.0) + axes_data.set_ylabel('time') + if self.plots[plot_type]['time_format'] is not None: + axes_data.get_yaxis().set_major_formatter(mdates.DateFormatter( + self.plots[plot_type]['time_format'])) + if self.plots[plot_type]['show_y_minor_ticks']: + axes_data.get_yaxis().set_minor_locator(AutoMinorLocator()) + if self.plots[plot_type]['show_x_minor_ticks']: + axes_data.get_xaxis().set_minor_locator(AutoMinorLocator()) + self._process_pyplot_kwargs(plot_type, dataset) + + # Plot reference dataset (top right) + # Note: make sure to use the same vmin and vmax than the top left + # plot if a common colorbar is desired + axes_ref = fig.add_subplot(gridspec[0:2, 2:4], sharex=axes_data, + sharey=axes_data) + plot_kwargs['axes'] = axes_ref + if self.plots[plot_type]['common_cbar']: + plot_kwargs.setdefault('vmin', plot_data.get_clim()[0]) + plot_kwargs.setdefault('vmax', plot_data.get_clim()[1]) + plot_ref = plot_func(ref_cube, **plot_kwargs) + axes_ref.set_title(self._get_label(ref_dataset), pad=3.0) + plt.setp(axes_ref.get_yticklabels(), visible=False) + self._process_pyplot_kwargs(plot_type, ref_dataset) + + # Add colorbar(s) + self._add_colorbar(plot_type, plot_data, plot_ref, axes_data, + axes_ref, dataset, ref_dataset) + + # Plot bias (bottom center) + bias_cube = cube - ref_cube + axes_bias = fig.add_subplot(gridspec[3:5, 1:3], sharex=axes_data, + sharey=axes_data) + plot_kwargs_bias = self._get_plot_kwargs(plot_type, dataset, + bias=True) + plot_kwargs_bias['axes'] = axes_bias + plot_kwargs_bias['coords'] = coord_names + plot_bias = plot_func(bias_cube, **plot_kwargs_bias) + axes_bias.set_title( + f"{self._get_label(dataset)} - {self._get_label(ref_dataset)}", + pad=3.0, + ) + 
axes_bias.set_ylabel('time') + if 'latitude' in dim_coords_dat: + axes_bias.set_xlabel('latitude [°N]') + elif 'longitude' in dim_coords_dat: + axes_bias.set_xlabel('longitude [°E]') + cbar_kwargs_bias = self._get_cbar_kwargs(plot_type, bias=True) + cbar_bias = fig.colorbar(plot_bias, ax=axes_bias, + **cbar_kwargs_bias) + cbar_bias.set_label( + self._get_cbar_label(plot_type, dataset, bias=True), + fontsize=fontsize, + ) + cbar_bias.ax.tick_params(labelsize=fontsize) + + # Customize plot + fig.suptitle(f"{dataset['long_name']} ({dataset['start_year']}-" + f"{dataset['end_year']})") + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes_data, axes_ref, axes_bias]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = ( + get_diagnostic_filename(Path(plot_path).stem + "_{pos}", self.cfg) + ) + netcdf_paths = { + netcdf_path.format(pos='top_left'): cube, + netcdf_path.format(pos='top_right'): ref_cube, + netcdf_path.format(pos='bottom'): bias_cube, + } + + return (plot_path, netcdf_paths) + + def _plot_hovmoeller_time_vs_lat_or_lon_without_ref(self, plot_func, + dataset): + """Plot time vs zonal or meridional Hovmoeller without reference.""" + plot_type = 'hovmoeller_time_vs_lat_or_lon' + logger.info("Plotting Hovmoeller plots without reference dataset" + " for '%s'", self._get_label(dataset)) + + # Make sure that the data has the correct dimensions + cube = dataset['cube'] + dim_coords_dat = self._check_cube_dimensions(cube, plot_type) + + # Create plot with desired settings + with mpl.rc_context(self._get_custom_mpl_rc_params(plot_type)): + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + + # Make sure time is on y-axis + plot_kwargs['coords'] = list(reversed(dim_coords_dat)) + plot_hovmoeller = plot_func(cube, **plot_kwargs) + + # Setup 
colorbar + fontsize = self.plots[plot_type]['fontsize'] + colorbar = fig.colorbar(plot_hovmoeller, ax=axes, + **self._get_cbar_kwargs(plot_type)) + colorbar.set_label(self._get_cbar_label(plot_type, dataset), + fontsize=fontsize) + colorbar.ax.tick_params(labelsize=fontsize) + + # Customize plot + axes.set_title(self._get_label(dataset)) + fig.suptitle(f"{dataset['long_name']} ({dataset['start_year']}-" + f"{dataset['end_year']})") + if 'latitude' in dim_coords_dat: + axes.set_xlabel('latitude [°N]') + elif 'longitude' in dim_coords_dat: + axes.set_xlabel('longitude [°E]') + axes.set_ylabel('time') + if self.plots[plot_type]['time_format'] is not None: + axes.get_yaxis().set_major_formatter(mdates.DateFormatter( + self.plots[plot_type]['time_format']) + ) + if self.plots[plot_type]['show_y_minor_ticks']: + axes.get_yaxis().set_minor_locator(AutoMinorLocator()) + if self.plots[plot_type]['show_x_minor_ticks']: + axes.get_xaxis().set_minor_locator(AutoMinorLocator()) + self._process_pyplot_kwargs(plot_type, dataset) + + # Rasterization + if self.plots[plot_type]['rasterize']: + self._set_rasterized([axes]) + + # File paths + plot_path = self.get_plot_path(plot_type, dataset) + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + return (plot_path, {netcdf_path: cube}) + + def _process_pyplot_kwargs(self, plot_type, dataset): + """Process functions for :mod:`matplotlib.pyplot`.""" + pyplot_kwargs = self.plots[plot_type]['pyplot_kwargs'] + for (func, arg) in pyplot_kwargs.items(): + if isinstance(arg, str): + arg = self._fill_facet_placeholders( + arg, + dataset, + f"pyplot_kwargs of {plot_type} '{func}: {arg}'", + ) + if arg is None: + getattr(plt, func)() + else: + getattr(plt, func)(arg) + + @staticmethod + def _check_cube_dimensions(cube, plot_type): + """Check that cube has correct dimensional variables.""" + expected_dimensions_dict = { + 'annual_cycle': (['month_number'],), + 'map': (['latitude', 'longitude'],), + 'zonal_mean_profile': 
(['latitude', 'air_pressure'], + ['latitude', 'altitude']), + 'timeseries': (['time'],), + '1d_profile': (['air_pressure'], + ['altitude']), + 'variable_vs_lat': (['latitude'],), + 'hovmoeller_z_vs_time': (['time', 'air_pressure'], + ['time', 'altitude']), + 'hovmoeller_time_vs_lat_or_lon': (['time', 'latitude'], + ['time', 'longitude']), + } + if plot_type not in expected_dimensions_dict: + raise NotImplementedError(f"plot_type '{plot_type}' not supported") + expected_dimensions = expected_dimensions_dict[plot_type] + for dims in expected_dimensions: + cube_dims = [cube.coords(dim, dim_coords=True) for dim in dims] + if all(cube_dims) and cube.ndim == len(dims): + return dims + expected_dims_str = ' or '.join( + [str(dims) for dims in expected_dimensions] + ) + raise ValueError( + f"Expected cube that exactly has the dimensional coordinates " + f"{expected_dims_str}, got {cube.summary(shorten=True)}") + + @staticmethod def _fill_facet_placeholders(string, dataset, description): """Fill facet placeholders.""" try: @@ -1170,21 +1847,21 @@ def _get_multi_dataset_facets(datasets): multi_dataset_facets[key] = f'ambiguous_{key}' return multi_dataset_facets - @staticmethod - def _get_reference_dataset(datasets, short_name): + def _get_reference_dataset(self, datasets): """Extract reference dataset.""" + variable = datasets[0][self.cfg['group_variables_by']] ref_datasets = [d for d in datasets if d.get('reference_for_monitor_diags', False)] if len(ref_datasets) > 1: raise ValueError( f"Expected at most 1 reference dataset (with " f"'reference_for_monitor_diags: true' for variable " - f"'{short_name}', got {len(ref_datasets):d}") + f"'{variable}', got {len(ref_datasets):d}") if ref_datasets: return ref_datasets[0] return None - def create_timeseries_plot(self, datasets, short_name): + def create_timeseries_plot(self, datasets): """Create time series plot.""" plot_type = 'timeseries' if plot_type not in self.plots: @@ -1226,8 +1903,15 @@ def create_timeseries_plot(self, 
datasets, short_name): # Default plot appearance multi_dataset_facets = self._get_multi_dataset_facets(datasets) axes.set_title(multi_dataset_facets['long_name']) - axes.set_xlabel('Time') - axes.set_ylabel(f"{short_name} [{multi_dataset_facets['units']}]") + axes.set_xlabel('time') + # apply time formatting + if self.plots[plot_type]['time_format'] is not None: + axes.get_xaxis().set_major_formatter( + mdates.DateFormatter(self.plots[plot_type]['time_format'])) + axes.set_ylabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) gridline_kwargs = self._get_gridline_kwargs(plot_type) if gridline_kwargs is not False: axes.grid(**gridline_kwargs) @@ -1267,7 +1951,7 @@ def create_timeseries_plot(self, datasets, short_name): provenance_logger.log(plot_path, provenance_record) provenance_logger.log(netcdf_path, provenance_record) - def create_annual_cycle_plot(self, datasets, short_name): + def create_annual_cycle_plot(self, datasets): """Create annual cycle plot.""" plot_type = 'annual_cycle' if plot_type not in self.plots: @@ -1298,7 +1982,10 @@ def create_annual_cycle_plot(self, datasets, short_name): multi_dataset_facets = self._get_multi_dataset_facets(datasets) axes.set_title(multi_dataset_facets['long_name']) axes.set_xlabel('Month') - axes.set_ylabel(f"{short_name} [{multi_dataset_facets['units']}]") + axes.set_ylabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) axes.set_xticks(range(1, 13), [str(m) for m in range(1, 13)]) gridline_kwargs = self._get_gridline_kwargs(plot_type) if gridline_kwargs is not False: @@ -1339,7 +2026,7 @@ def create_annual_cycle_plot(self, datasets, short_name): provenance_logger.log(plot_path, provenance_record) provenance_logger.log(netcdf_path, provenance_record) - def create_map_plot(self, datasets, short_name): + def create_map_plot(self, datasets): """Create map plot.""" plot_type = 'map' if plot_type not in 
self.plots: @@ -1349,7 +2036,7 @@ def create_map_plot(self, datasets, short_name): raise ValueError(f"No input data to plot '{plot_type}' given") # Get reference dataset if possible - ref_dataset = self._get_reference_dataset(datasets, short_name) + ref_dataset = self._get_reference_dataset(datasets) if ref_dataset is None: logger.info("Plotting %s without reference dataset", plot_type) else: @@ -1415,7 +2102,7 @@ def create_map_plot(self, datasets, short_name): for netcdf_path in netcdf_paths: provenance_logger.log(netcdf_path, provenance_record) - def create_zonal_mean_profile_plot(self, datasets, short_name): + def create_zonal_mean_profile_plot(self, datasets): """Create zonal mean profile plot.""" plot_type = 'zonal_mean_profile' if plot_type not in self.plots: @@ -1425,7 +2112,7 @@ def create_zonal_mean_profile_plot(self, datasets, short_name): raise ValueError(f"No input data to plot '{plot_type}' given") # Get reference dataset if possible - ref_dataset = self._get_reference_dataset(datasets, short_name) + ref_dataset = self._get_reference_dataset(datasets) if ref_dataset is None: logger.info("Plotting %s without reference dataset", plot_type) else: @@ -1493,7 +2180,7 @@ def create_zonal_mean_profile_plot(self, datasets, short_name): for netcdf_path in netcdf_paths: provenance_logger.log(netcdf_path, provenance_record) - def create_1d_profile_plot(self, datasets, short_name): + def create_1d_profile_plot(self, datasets): """Create 1D profile plot.""" plot_type = '1d_profile' if plot_type not in self.plots: @@ -1525,7 +2212,10 @@ def create_1d_profile_plot(self, datasets, short_name): # Default plot appearance axes.set_title(multi_dataset_facets['long_name']) - axes.set_xlabel(f"{short_name} [{multi_dataset_facets['units']}]") + axes.set_xlabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) z_coord = cube.coord(axis='Z') axes.set_ylabel(f'{z_coord.long_name} [{z_coord.units}]') @@ -1595,21 +2285,252 
@@ def create_1d_profile_plot(self, datasets, short_name): provenance_logger.log(plot_path, provenance_record) provenance_logger.log(netcdf_path, provenance_record) + def create_variable_vs_lat_plot(self, datasets): + """Create Variable as a function of latitude.""" + plot_type = 'variable_vs_lat' + if plot_type not in self.plots: + return + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + logger.info("Plotting %s", plot_type) + fig = plt.figure(**self.cfg['figure_kwargs']) + axes = fig.add_subplot() + + # Plot all datasets in one single figure + ancestors = [] + cubes = {} + for dataset in datasets: + ancestors.append(dataset['filename']) + cube = dataset['cube'] + cubes[self._get_label(dataset)] = cube + self._check_cube_dimensions(cube, plot_type) + + # Plot data + plot_kwargs = self._get_plot_kwargs(plot_type, dataset) + plot_kwargs['axes'] = axes + iris.plot.plot(cube, **plot_kwargs) + + # Default plot appearance + multi_dataset_facets = self._get_multi_dataset_facets(datasets) + axes.set_title(multi_dataset_facets['long_name']) + axes.set_xlabel('latitude [°N]') + axes.set_ylabel( + f"{multi_dataset_facets[self.cfg['group_variables_by']]} " + f"[{multi_dataset_facets['units']}]" + ) + gridline_kwargs = self._get_gridline_kwargs(plot_type) + if gridline_kwargs is not False: + axes.grid(**gridline_kwargs) + + # Legend + legend_kwargs = self.plots[plot_type]['legend_kwargs'] + if legend_kwargs is not False: + axes.legend(**legend_kwargs) + + # Customize plot appearance + self._process_pyplot_kwargs(plot_type, multi_dataset_facets) + + # Save plot + plot_path = self.get_plot_path(plot_type, multi_dataset_facets) + fig.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDF file + netcdf_path = get_diagnostic_filename(Path(plot_path).stem, self.cfg) + var_attrs = { + n: datasets[0][n] for n in ('short_name', 'long_name', 'units') + } + io.save_1d_data(cubes, 
netcdf_path, 'latitude', var_attrs) + + # Provenance tracking + caption = (f"{multi_dataset_facets['long_name']} vs. latitude for " + f"various datasets.") + provenance_record = { + 'ancestors': ancestors, + 'authors': ['sarauer_ellen'], + 'caption': caption, + 'plot_types': ['line'], + 'long_names': [var_attrs['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + provenance_logger.log(netcdf_path, provenance_record) + + def create_hovmoeller_z_vs_time_plot(self, datasets): + """Create Hovmoeller Z vs. time plot.""" + plot_type = 'hovmoeller_z_vs_time' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + # Get reference dataset if possible + ref_dataset = self._get_reference_dataset(datasets) + if ref_dataset is None: + logger.info("Plotting %s without reference dataset", plot_type) + else: + logger.info("Plotting %s with reference dataset '%s'", plot_type, + self._get_label(ref_dataset)) + + # Get plot function + plot_func = self._get_plot_func(plot_type) + + # Create a single plot for each dataset (incl. reference dataset if + # given) + for dataset in datasets: + if dataset == ref_dataset: + continue + ancestors = [dataset['filename']] + if ref_dataset is None: + (plot_path, + netcdf_paths) = (self._plot_hovmoeller_z_vs_time_without_ref( + plot_func, dataset)) + caption = ( + f"Hovmoeller Z vs. time plot of {dataset['long_name']} " + f"of dataset " + f"{dataset['dataset']} (project {dataset['project']}) " + f"from {dataset['start_year']} to {dataset['end_year']}.") + else: + (plot_path, + netcdf_paths) = (self._plot_hovmoeller_z_vs_time_with_ref( + plot_func, dataset, ref_dataset)) + caption = ( + f"Hovmoeller Z vs. 
time plot of {dataset['long_name']} " + f"of dataset " + f"{dataset['dataset']} (project {dataset['project']}) " + f"including bias relative to {ref_dataset['dataset']} " + f"(project {ref_dataset['project']}) from " + f"{dataset['start_year']} to {dataset['end_year']}.") + ancestors.append(ref_dataset['filename']) + + # If statistics are shown add a brief description to the caption + if self.plots[plot_type]['show_stats']: + caption += ( + " The number in the top left corner corresponds to the " + "spatiotemporal mean.") + + # Save plot + plt.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDFs + for (netcdf_path, cube) in netcdf_paths.items(): + io.iris_save(cube, netcdf_path) + + # Provenance tracking + provenance_record = { + 'ancestors': ancestors, + 'authors': ['kuehbacher_birgit', 'heuer_helge'], + 'caption': caption, + 'plot_types': ['vert'], + 'long_names': [dataset['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + for netcdf_path in netcdf_paths: + provenance_logger.log(netcdf_path, provenance_record) + + def create_hovmoeller_time_vs_lat_or_lon_plot(self, datasets): + """Create the Hovmoeller plot with time vs latitude or longitude.""" + plot_type = 'hovmoeller_time_vs_lat_or_lon' + if plot_type not in self.plots: + return + + if not datasets: + raise ValueError(f"No input data to plot '{plot_type}' given") + + # Get reference dataset if possible + ref_dataset = self._get_reference_dataset(datasets) + if ref_dataset is None: + logger.info("Plotting %s without reference dataset", plot_type) + else: + logger.info("Plotting %s with reference dataset '%s'", plot_type, + self._get_label(ref_dataset)) + + # Get plot function + plot_func = self._get_plot_func(plot_type) + + # Create a single plot for each dataset (incl. 
reference dataset if + # given) + for dataset in datasets: + if dataset == ref_dataset: + continue + ancestors = [dataset['filename']] + if ref_dataset is None: + (plot_path, netcdf_paths) = ( + self._plot_hovmoeller_time_vs_lat_or_lon_without_ref( + plot_func, + dataset) + ) + caption = ( + f"Hovmoeller plot of {dataset['long_name']} of dataset " + f"{dataset['dataset']} (project {dataset['project']}) " + f"from {dataset['start_year']} to {dataset['end_year']}." + ) + else: + (plot_path, netcdf_paths) = ( + self._plot_hovmoeller_time_vs_lat_or_lon_with_ref( + plot_func, dataset, ref_dataset) + ) + caption = ( + f"Hovmoeller plot of {dataset['long_name']} of dataset " + f"{dataset['dataset']} (project {dataset['project']}) " + f"including bias relative to {ref_dataset['dataset']} " + f"(project {ref_dataset['project']}) from " + f"{dataset['start_year']} to {dataset['end_year']}." + ) + ancestors.append(ref_dataset['filename']) + + # Save plot + plt.savefig(plot_path, **self.cfg['savefig_kwargs']) + logger.info("Wrote %s", plot_path) + plt.close() + + # Save netCDFs + for (netcdf_path, cube) in netcdf_paths.items(): + io.iris_save(cube, netcdf_path) + + # Provenance tracking + provenance_record = { + 'ancestors': ancestors, + 'authors': ['schlund_manuel', 'kraft_jeremy', 'ruhe_lukas'], + 'caption': caption, + 'plot_types': ['zonal'], + 'long_names': [dataset['long_name']], + } + with ProvenanceLogger(self.cfg) as provenance_logger: + provenance_logger.log(plot_path, provenance_record) + for netcdf_path in netcdf_paths: + provenance_logger.log(netcdf_path, provenance_record) + def compute(self): """Plot preprocessed data.""" - for (short_name, datasets) in self.grouped_input_data.items(): - logger.info("Processing variable %s", short_name) - self.create_timeseries_plot(datasets, short_name) - self.create_annual_cycle_plot(datasets, short_name) - self.create_map_plot(datasets, short_name) - self.create_zonal_mean_profile_plot(datasets, short_name) - 
self.create_1d_profile_plot(datasets, short_name) + for (var_key, datasets) in self.grouped_input_data.items(): + logger.info("Processing variable %s", var_key) + self.create_timeseries_plot(datasets) + self.create_annual_cycle_plot(datasets) + self.create_map_plot(datasets) + self.create_zonal_mean_profile_plot(datasets) + self.create_1d_profile_plot(datasets) + self.create_variable_vs_lat_plot(datasets) + self.create_hovmoeller_z_vs_time_plot(datasets) + self.create_hovmoeller_time_vs_lat_or_lon_plot(datasets) def main(): """Run diagnostic.""" with run_diagnostic() as config: - MultiDatasets(config).compute() + with warnings.catch_warnings(): + warnings.filterwarnings( + 'ignore', + message="Using DEFAULT_SPHERICAL_EARTH_RADIUS", + category=UserWarning, + module='iris', + ) + MultiDatasets(config).compute() if __name__ == '__main__': diff --git a/esmvaltool/diag_scripts/perfmetrics/collect.ncl b/esmvaltool/diag_scripts/perfmetrics/collect.ncl index 0fd2efe311..a2b14ab733 100644 --- a/esmvaltool/diag_scripts/perfmetrics/collect.ncl +++ b/esmvaltool/diag_scripts/perfmetrics/collect.ncl @@ -351,9 +351,9 @@ begin end if if (n_proj .gt. 1) then id2 = ind(projectnames.eq.diag_script_info@project_order(1)) - id2_mm = (/ind(data_all&models(id2) .eq. \ + id2_mm = (/ind(data_temp&models(id2) .eq. \ diag_script_info@project_order(1) + "_mean"), \ - ind(data_all&models(id2) .eq. \ + ind(data_temp&models(id2) .eq. \ diag_script_info@project_order(1) + "_median") /) if (any(ismissing(id2_mm))) then id2_mm = -1 @@ -368,9 +368,9 @@ begin end if if (n_proj .gt. 2) then id3 = ind(projectnames.eq.diag_script_info@project_order(2)) - id3_mm = (/ind(data_all&models(id3) .eq. \ + id3_mm = (/ind(data_temp&models(id3) .eq. \ diag_script_info@project_order(2) + "_mean"), \ - ind(data_all&models(id3) .eq. \ + ind(data_temp&models(id3) .eq. 
\ diag_script_info@project_order(2) + "_median") /) if (any(ismissing(id3_mm))) then id3_mm = -1 @@ -502,8 +502,9 @@ begin domains = (/"global"/) ; Call provenance logger - log_provenance(ncdf_outfile, plotpath, caption, statistics, domains, \ - plottype, authors, references, data_files) + log_provenance(ncdf_outfile, plotpath + "." + file_type, caption, \ + statistics, domains, plottype, authors, references, \ + data_files) leave_msg(DIAG_SCRIPT, "") diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl index aa0db4d6a6..034d3a1287 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig2.ncl @@ -158,7 +158,7 @@ begin ; Call provenance logger log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 2", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl index c182468947..c912663321 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b-2.ncl @@ -274,7 +274,7 @@ begin delete(last_index) delete(out_var) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 3b part 2", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl index 0b6b9d155f..fd4cf3f43b 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig3b.ncl @@ -264,7 +264,7 @@ begin delete(last_index) delete(out_var) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." 
+ output_type(), \ "Russell et al 2018 figure 3b", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl index 10f94a62b2..4e62c7b552 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig4.ncl @@ -368,7 +368,7 @@ begin delete(totaltransport) ; Call provenance logger log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + file_type, \ "Russell et al 2018 figure 4", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl index bddd26791b..5a60ea03d9 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5.ncl @@ -169,7 +169,7 @@ begin delete(dataset) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 5 -polar", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl index 128fb25f43..67d8c53491 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig5g.ncl @@ -226,7 +226,7 @@ begin ; Call provenance logger log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 5g", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl index 29bbe4ace7..206d2909cc 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl @@ -699,7 +699,7 @@ begin ncdf_outfile = ncdf_write(out_var, nc_filename) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." 
+ output_type(), \ "Russell et al 2018 figure 6 part a", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl index bbbf72c72d..2287b62adf 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl @@ -678,7 +678,7 @@ begin ncdf_outfile = ncdf_write(out_var, nc_filename) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 6b", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl index 4a698f455e..249712d29f 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7h.ncl @@ -164,7 +164,7 @@ begin delete(var_lon_avg) delete(dataset) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 7h", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl index dd56bd4a41..7abcc9e7a9 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl @@ -202,7 +202,7 @@ begin end if log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 7i", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl index 99cf97b2ce..8bef8cb204 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9a.ncl @@ -317,7 +317,7 @@ begin ncdf_outfile = ncdf_write(outvar, nc_filename) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." 
+ output_type(), \ "Russell et al 2018 figure 9a", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl index bade43d9cf..8afd05425a 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9b.ncl @@ -335,7 +335,7 @@ begin ncdf_outfile = ncdf_write(outvar, nc_filename) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 9b", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl index e669bac579..0e5c828a6a 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl @@ -331,7 +331,7 @@ begin ncdf_outfile = ncdf_write(outvar, nc_filename) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." + output_type(), \ "Russell et al 2018 figure 9c", \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl index e01771babf..b9c8f1faaa 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-polar.ncl @@ -232,7 +232,7 @@ begin ncdf_outfile = ncdf_write(dataset, nc_filename) delete(dataset) log_provenance(ncdf_outfile, \ - plotpath, \ + plotpath + "." 
+ output_type(), \ "Russell et al 2018 polar plot " + var0, \ "mean", \ "sh", \ diff --git a/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py b/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py index 403bc3e630..94ab3b14ec 100644 --- a/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py +++ b/esmvaltool/diag_scripts/shapeselect/diag_shapeselect.py @@ -11,8 +11,11 @@ from shapely.geometry import MultiPoint, shape from shapely.ops import nearest_points -from esmvaltool.diag_scripts.shared import (run_diagnostic, ProvenanceLogger, - get_diagnostic_filename) +from esmvaltool.diag_scripts.shared import ( + ProvenanceLogger, + get_diagnostic_filename, + run_diagnostic, +) logger = logging.getLogger(os.path.basename(__file__)) @@ -48,16 +51,22 @@ def main(cfg): xname = name + '_table' writexls(cfg, filename, ncts, nclon, nclat) caption = 'Selected gridpoints within shapefile.' - get_provenance_record( - cfg, xname, caption, 'xlsx', ancestor_files=[filename]) + get_provenance_record(cfg, + xname, + caption, + 'xlsx', + ancestor_files=[filename]) path = os.path.join( cfg['work_dir'], name + '.nc', ) write_netcdf(path, ncts, nclon, nclat, cube, cfg) caption = 'Selected gridpoints within shapefile.' 
- get_provenance_record( - cfg, name, caption, 'nc', ancestor_files=[filename]) + get_provenance_record(cfg, + name, + caption, + 'nc', + ancestor_files=[filename]) def write_keyvalue_toxlsx(worksheet, row, key, value): @@ -93,17 +102,16 @@ def writexls(cfg, filename, ncts, nclon1, nclat1): workbook = xlsxwriter.Workbook( os.path.join( cfg['work_dir'], - os.path.splitext(os.path.basename(filename))[0] + '_polygon_table' - + '.xlsx')) + os.path.splitext(os.path.basename(filename))[0] + + '_polygon_table' + '.xlsx')) worksheet = workbook.add_worksheet('Data') worksheet.write(0, 0, 'Date') worksheet.write(0, 1, 'Lon/Lat') worksheet.write_column(2, 0, wtime) for row in range(ncts.shape[1]): worksheet.write( - 1, row + 1, - str("%#.3f" % round(float(nclon1[row]), 3)) + '_' + str( - "%#.3f" % round(float(nclat1[row]), 3))) + 1, row + 1, f"{round(float(nclon1[row]), 3):.3f}_\ + {round(float(nclat1[row]), 3)}") worksheet.write_column(2, row + 1, np.around(np.squeeze(ncts[:, row]), decimals=8)) worksheet.set_column(0, row + 1, 20) @@ -135,7 +143,8 @@ def shapeselect(cfg, cube): coordpoints[i] = (coordpoints[i][0] - 360., coordpoints[i][1]) else: raise ValueError("Support for 2-d coords not implemented!") - points = MultiPoint(coordpoints) + multipoint = MultiPoint(coordpoints) + points = list(multipoint.geoms) with fiona.open(shppath) as shp: gpx = [] gpy = [] @@ -149,15 +158,16 @@ def shapeselect(cfg, cube): if wgtmet == 'mean_inside': gpx, gpy = mean_inside(gpx, gpy, points, multi, cube) if not gpx: - gpx, gpy = representative(gpx, gpy, points, multi, cube) + gpx, gpy = representative(gpx, gpy, multipoint, multi, + cube) elif wgtmet == 'representative': - gpx, gpy = representative(gpx, gpy, points, multi, cube) + gpx, gpy = representative(gpx, gpy, multipoint, multi, cube) if len(gpx) == 1: ncts[:, ishp] = np.reshape(cube.data[:, gpy, gpx], (cube.data.shape[0], )) else: ncts[:, ishp] = np.mean(cube.data[:, gpy, gpx], axis=1) - gxx, gyy = representative([], [], points, 
multi, cube) + gxx, gyy = representative([], [], multipoint, multi, cube) nclon[ishp] = cube.coord('longitude').points[gxx] nclat[ishp] = cube.coord('latitude').points[gyy] return ncts, nclon, nclat @@ -179,10 +189,10 @@ def mean_inside(gpx, gpy, points, multi, cube): return gpx, gpy -def representative(gpx, gpy, points, multi, cube): +def representative(gpx, gpy, multipoint, multi, cube): """Find representative point in shape.""" reprpoint = multi.representative_point() - nearest = nearest_points(reprpoint, points) + nearest = nearest_points(reprpoint, multipoint) npx = nearest[1].coords[0][0] npy = nearest[1].coords[0][1] if npx < 0: @@ -235,19 +245,24 @@ def write_netcdf(path, var, plon, plat, cube, cfg): polys.setncattr_string('standard_name', 'polygon') polys.setncattr_string('long_name', 'polygon') polys.setncattr_string('shapefile', cfg['shapefile']) - lon = ncout.createVariable( - cube.coord('longitude').var_name, 'f8', 'polygon', zlib=True) + lon = ncout.createVariable(cube.coord('longitude').var_name, + 'f8', + 'polygon', + zlib=True) lon.setncattr_string('standard_name', cube.coord('longitude').standard_name) lon.setncattr_string('long_name', cube.coord('longitude').long_name) lon.setncattr_string('units', cube.coord('longitude').units.origin) - lat = ncout.createVariable( - cube.coord('latitude').var_name, 'f8', 'polygon', zlib=True) + lat = ncout.createVariable(cube.coord('latitude').var_name, + 'f8', + 'polygon', + zlib=True) lat.setncattr_string('standard_name', cube.coord('latitude').standard_name) lat.setncattr_string('long_name', cube.coord('latitude').long_name) lat.setncattr_string('units', cube.coord('latitude').units.origin) - data = ncout.createVariable( - cube.var_name, 'f4', ('time', 'polygon'), zlib=True) + data = ncout.createVariable(cube.var_name, + 'f4', ('time', 'polygon'), + zlib=True) data.setncattr_string('standard_name', cube.standard_name) data.setncattr_string('long_name', cube.long_name) data.setncattr_string('units', 
cube.units.origin) diff --git a/esmvaltool/diag_scripts/shared/scaling.ncl b/esmvaltool/diag_scripts/shared/scaling.ncl index 986398746d..a2ef8f185d 100644 --- a/esmvaltool/diag_scripts/shared/scaling.ncl +++ b/esmvaltool/diag_scripts/shared/scaling.ncl @@ -169,6 +169,15 @@ begin end if end if + if (units_from.eq."kg.s-1") then + if (any(units_to.eq.(/"PgC y-1", "GtC y-1"/))) then + out = out * 3600. * 24. * 365 / 1.e12 + out@units = units_to + leave_msg(scriptname, funcname) + return(out) + end if + end if + if (units_from.eq."kg m-2 s-1") then if (any(units_to.eq.(/"PgC y-1", "GtC y-1"/))) then out = out * 3600. * 24. * 365 / 1.e12 diff --git a/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl b/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl index 8484a00871..f8f0d83511 100644 --- a/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl +++ b/esmvaltool/diag_scripts/tebaldi21esd/calc_IAV_hatching.ncl @@ -399,7 +399,6 @@ begin ; collect meta-data nc_file = ncdf_outfile - plot_file = plot_dir + "/None" caption = "Inter-annual variability based on piControl runs." 
statistics = ("var") domains = ("global") @@ -407,7 +406,7 @@ begin authors = (/"lorenz_ruth"/) references = (/"collins13ipcc"/) infiles = metadata_att_as_array(info_items, "filename") - log_provenance(nc_file, plot_file, caption, statistics, domains, \ + log_provenance(nc_file, "n/a", caption, statistics, domains, \ plot_types, authors, references, infiles) leave_msg(DIAG_SCRIPT, "") diff --git a/esmvaltool/diag_scripts/validation.py b/esmvaltool/diag_scripts/validation.py index d72cc5e1ad..52c8d1a561 100644 --- a/esmvaltool/diag_scripts/validation.py +++ b/esmvaltool/diag_scripts/validation.py @@ -29,27 +29,11 @@ def _get_provenance_record(cfg, plot_file, caption, loc): """Create a provenance record describing the diagnostic data and plot.""" - all_input_files = [ - k for k in cfg["input_data"].keys() if k.endswith(".nc") - ] - if "_vs_" in plot_file: - model_1 = plot_file.split("_vs_")[0].split("_")[-1] - if plot_file.endswith(".png"): - model_2 = plot_file.split("_vs_")[1].strip(".png") - elif plot_file.endswith(".nc"): - model_2 = plot_file.split("_vs_")[1].strip(".nc") - ancestor_1 = [ - k for k in all_input_files if model_1 in os.path.basename(k) - ][0] - ancestor_2 = [ - k for k in all_input_files if model_2 in os.path.basename(k) - ][0] - ancestor_files = [ancestor_1, ancestor_2] - else: - model = os.path.basename(plot_file).split("_")[0] - ancestor_files = [ - k for k in all_input_files if model in os.path.basename(k) - ] + ancestor_files = [] + for dataset in cfg['input_data'].values(): + if (dataset['alias'] in plot_file and + dataset['short_name'] in plot_file): + ancestor_files.append(dataset['filename']) record = { 'caption': caption, 'statistics': ['mean'], @@ -72,9 +56,9 @@ def _get_provenance_record(cfg, plot_file, caption, loc): def plot_contour(cube, cfg, plt_title, file_name): """Plot a contour with iris.quickplot (qplot).""" if len(cube.shape) == 2: - qplt.contourf(cube, cmap='RdYlBu_r', bbox_inches='tight') + qplt.contourf(cube, 
cmap='RdYlBu_r') else: - qplt.contourf(cube[0], cmap='RdYlBu_r', bbox_inches='tight') + qplt.contourf(cube[0], cmap='RdYlBu_r') plt.title(plt_title) plt.gca().coastlines() plt.tight_layout() @@ -138,7 +122,10 @@ def plot_latlon_cubes(cube_1, # plot each cube var = data_names.split('_')[0] if not obs_name: - cube_names = [data_names.split('_')[1], data_names.split('_')[3]] + cube_names = [ + data_names.replace(f'{var}_', '').split('_vs_')[i] for i in + range(2) + ] for cube, cube_name in zip(cubes, cube_names): if not season: plot_file_path = os.path.join( @@ -179,23 +166,40 @@ def plot_zonal_cubes(cube_1, cube_2, cfg, plot_data): # xcoordinate: latotude or longitude (str) data_names, xcoordinate, period = plot_data var = data_names.split('_')[0] - cube_names = [data_names.split('_')[1], data_names.split('_')[3]] + cube_names = data_names.replace(var + '_', '').split('_vs_') lat_points = cube_1.coord(xcoordinate).points plt.plot(lat_points, cube_1.data, label=cube_names[0]) plt.plot(lat_points, cube_2.data, label=cube_names[1]) + plt.title(f'Annual Climatology of {var}' if period == 'alltime' + else f'{period} of {var}') if xcoordinate == 'latitude': - plt.title(period + ' Zonal Mean for ' + var + ' ' + data_names) + axis = plt.gca() + axis.set_xticks([-60, -30, 0, 30, 60], + labels=['60\N{DEGREE SIGN} S', + '30\N{DEGREE SIGN} S', + '0\N{DEGREE SIGN}', + '30\N{DEGREE SIGN} N', + '60\N{DEGREE SIGN} N']) elif xcoordinate == 'longitude': - plt.title(period + ' Meridional Mean for ' + var + ' ' + data_names) + axis = plt.gca() + axis.set_xticks([0, 60, 120, 180, 240, 300, 360], + labels=['0\N{DEGREE SIGN} E', + '60\N{DEGREE SIGN} E', + '120\N{DEGREE SIGN} E', + '180\N{DEGREE SIGN} E', + '240\N{DEGREE SIGN} E', + '300\N{DEGREE SIGN} E', + '0\N{DEGREE SIGN} E']) plt.xlabel(xcoordinate + ' (deg)') - plt.ylabel(var) + plt.ylabel(f'{var} [{str(cube_1.units)}]') plt.tight_layout() plt.grid() plt.legend() + png_name = f'{xcoordinate}_{period}_{data_names}.png' if xcoordinate 
== 'latitude': - png_name = 'Zonal_Mean_' + xcoordinate + '_' + data_names + '.png' + png_name = 'Zonal_Mean_' + png_name elif xcoordinate == 'longitude': - png_name = 'Merid_Mean_' + xcoordinate + '_' + data_names + '.png' + png_name = 'Merid_Mean_' + png_name plot_file_path = os.path.join(cfg['plot_dir'], period, png_name) plt.savefig(plot_file_path) save_plotted_cubes( @@ -252,13 +256,13 @@ def coordinate_collapse(data_set, cfg): if 'mask_threshold' in cfg: thr = cfg['mask_threshold'] data_set.data = np.ma.masked_array(data_set.data, - mask=(mask_cube.data > thr)) + mask=mask_cube.data > thr) else: logger.warning('Could not find masking threshold') logger.warning('Please specify it if needed') logger.warning('Masking on 0-values = True (masked value)') data_set.data = np.ma.masked_array(data_set.data, - mask=(mask_cube.data == 0)) + mask=mask_cube.data == 0) # if zonal mean on LON if analysis_type == 'zonal_mean': diff --git a/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl b/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl index af7498d75b..f83f56cb31 100644 --- a/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl +++ b/esmvaltool/diag_scripts/xco2_analysis/delta_T.ncl @@ -332,7 +332,7 @@ begin ncdf_outfile = ncdf_write(delta_t_detr, outfile_netcdf_dtgr_detr) log_provenance(ncdf_outfile, \ - outfile_netcdf_dtgr_detr, \ + outfile_dtgr_detr + "." + file_type, \ "Sensitivity of interannual variability of " \ + var0 + " growth rate in the " + region + ", " + \ start_year + "-" + end_year + "to the interannual " \ diff --git a/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl b/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl index 9aa205db32..d1748425d4 100644 --- a/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl +++ b/esmvaltool/diag_scripts/xco2_analysis/global_maps.ncl @@ -472,7 +472,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile, \ + outfile + "." 
+ file_type, \ "Maps of mean Seasonal Cycle Amplitude of " + var0 + " " \ + DATASETS(imod) + " for " + start_year + "-" \ + end_year + ". Top: SCA with observational sampling (left)" \ @@ -587,7 +587,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_scap, \ + outfile_scap + "." + file_type, \ "Maps of mean Seasonal Cycle Amplitude of " \ + var0 + " for " + start_year + "-" \ + end_year + ". Similar to Gier et al 2020, Fig 5.", \ diff --git a/esmvaltool/diag_scripts/xco2_analysis/main.ncl b/esmvaltool/diag_scripts/xco2_analysis/main.ncl index 8f585e5f62..0739e52edb 100644 --- a/esmvaltool/diag_scripts/xco2_analysis/main.ncl +++ b/esmvaltool/diag_scripts/xco2_analysis/main.ncl @@ -438,7 +438,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_ts_panels, \ + outfile_ts_panels + "." + file_type, \ "Timeseries, growth rate and seasonal cycle of " \ + var0 + " for " + region + ", " + start_year + "-" \ + end_year + ". Using masking: " + opt_mask \ @@ -550,7 +550,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_grbp, \ + outfile_grbp + "." + file_type, \ "Growth Rate histogram of " \ + var0 + " for " + region + ", " + start_year + "-" \ + end_year + ". Using masking: " + opt_mask \ @@ -653,7 +653,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_sabp, \ + outfile_sabp + "." + file_type, \ "Seasonal Cycle Amplitude histogram of " \ + var0 + " for " + region + ", " + start_year + "-" \ + end_year + ". Using masking: " + opt_mask, \ diff --git a/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl b/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl index bcd4c82002..08fe30ad0f 100644 --- a/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl +++ b/esmvaltool/diag_scripts/xco2_analysis/panel_plots.ncl @@ -360,7 +360,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_scaplot, \ + outfile_scaplot + "." 
+ file_type, \ "Trend of Seasonal Cycle Amplitude with " \ + var0 + " for " + region + ", " + start_year + "-" \ + end_year + ". Using masking: " + opt_mask \ @@ -391,7 +391,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_grplot, \ + outfile_grplot + "." + file_type, \ "Trend of Growth Rate with " \ + var0 + " for " + region + ", " + start_year + "-" \ + end_year + ". Using masking: " + opt_mask \ diff --git a/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl b/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl index c54df52d90..2bed5c74a6 100644 --- a/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl +++ b/esmvaltool/diag_scripts/xco2_analysis/sat_masks.ncl @@ -258,7 +258,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile, \ + outfile + "." + file_type, \ var0 + "fractional data coverage " + start_year + "-" \ + end_year + ". Similar to Gier et al 2020, Fig 1.", \ (/"mean"/), \ @@ -377,7 +377,7 @@ begin delete(res) log_provenance(ncdf_outfile, \ - outfile_c3s, \ + outfile_c3s + "." + file_type, \ var0 + "fractional data coverage for different obs " \ + "periods. Similar to Gier et al 2020, Fig 8.", \ (/"mean"/), \ diff --git a/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl b/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl index 53734d94db..23155e36fa 100644 --- a/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl +++ b/esmvaltool/diag_scripts/xco2_analysis/station_comparison.ncl @@ -285,7 +285,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_stations_masked, \ + outfile_stations_masked + "." + file_type, \ "Time series of station " + station_array&station(istat) \ + ", dotted denoting areas masked like observations.", \ (/""/), \ @@ -334,7 +334,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_stations_obs, \ + outfile_stations_obs + "." 
+ file_type, \ "Time series of station " + station_array&station(istat) \ + ", for " + var0 + " and " + var1 + ".", \ (/"mean"/), \ @@ -501,7 +501,7 @@ begin ; Provenance log_provenance(ncdf_outfile, \ - outfile_map, \ + outfile_map + "." + file_type, \ "Time series for satellite, multi-model mean and station " \ + var0 + " and " + var1 \ + ". Similar to Gier et al, 2020 Fig. 2", \ diff --git a/esmvaltool/interface_scripts/logging.ncl b/esmvaltool/interface_scripts/logging.ncl index 8db7d8fe09..4cb5ff1498 100644 --- a/esmvaltool/interface_scripts/logging.ncl +++ b/esmvaltool/interface_scripts/logging.ncl @@ -334,9 +334,54 @@ begin ; Save entries for outfile if not "n/a" if (outfile .ne. "n/a") then - outstring2 = outstring - outstring2(0) = "? " + outfile - outstring := array_append_record(outstring, outstring2, 0) + suffix = get_file_suffix(outfile, 0) + + if (ismissing(suffix)) then + error_msg("f", scriptname, funcname, "got invalid value for outfile " + \ + "(path to figure): '" + outfile + "'; expected path to a " + \ + "file or 'n/a'") + end if + + ; For PNGs, additionally check for existence of files like + ; "plot_file.000001.png", "plot_file.000002.png", etc. and save + ; provenance record for each of them + if ((suffix .eq. ".png") .and. (.not. fileexists(outfile))) then + do file_idx = 1, 999999 + potential_outfile = suffix@fBase + "." + sprinti("%0.6i", file_idx) + \ + suffix + if (fileexists(potential_outfile)) then + if (.not. isvar("all_outfiles")) then + all_outfiles = potential_outfile + else + all_outfiles := array_append_record(all_outfiles, \ + potential_outfile, 0) + end if + else + break + end if + end do + end if + + ; For all other cases, use outfile + if (.not. isvar("all_outfiles")) then + all_outfiles = outfile + end if + + ; Save provenance record of all files + original_entry = outstring + do outfile_idx = 0, dimsizes(all_outfiles) - 1 + file_to_add := all_outfiles(outfile_idx) + if (.not. 
fileexists(outfile)) then + error_msg("f", scriptname, funcname, "outfile (path to figure) '" + \ + file_to_add + "' does not exist (for PNGs, this " + \ + "function also searches for 'FILE.000001.png', " + \ + "'FILE.000002.png', etc.); if no plot file is available " + \ + "use 'n/a'") + end if + new_entry = (/original_entry/) + new_entry(0) = "? " + file_to_add + outstring := array_append_record(outstring, new_entry, 0) + end do end if ; Save existing information to avoid overwriting diff --git a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml index aaca95d116..1ae9066ec3 100644 --- a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml +++ b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_1-4.yml @@ -400,11 +400,10 @@ diagnostics: variables: pr: <<: *var_cmip6_bias - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.3 additional_datasets: *cmip6_tas additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, - tier: 1} + - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1} scripts: model_bias: <<: *model_bias_settings @@ -416,15 +415,14 @@ diagnostics: variables: pr: <<: *var_cmip6_bias - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.3 project: CMIP5 ensemble: r1i1p1 start_year: 1985 end_year: 2004 additional_datasets: *cmip5_tas additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, - tier: 1} + - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1} scripts: model_bias: <<: *model_bias_settings @@ -436,7 +434,7 @@ diagnostics: variables: pr: <<: *var_cmip6_bias - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.3 mip: A1 project: CMIP3 modeling_realm: atm @@ -447,8 +445,7 @@ diagnostics: end_year: 1999 additional_datasets: *cmip3_tas additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, - tier: 1, mip: Amon} + - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1, mip: Amon} 
scripts: model_bias: <<: *model_bias_settings @@ -460,11 +457,10 @@ diagnostics: variables: pr: <<: *var_cmip6_bias - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.3 additional_datasets: *cmip6_highresmip_low additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, - tier: 1} + - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1} scripts: model_bias: <<: *model_bias_settings @@ -476,11 +472,10 @@ diagnostics: variables: pr: <<: *var_cmip6_bias - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.3 additional_datasets: *cmip6_highresmip_high additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, - tier: 1} + - {dataset: GPCP-V2.3, project: obs4MIPs, tier: 1} scripts: model_bias: <<: *model_bias_settings diff --git a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml index c54ffc5f1e..93e4284ec5 100644 --- a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml +++ b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_6-7.yml @@ -359,7 +359,7 @@ diagnostics: variables: pr: preprocessor: ppNOLEV1 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 alternative_dataset: GHCN mip: Amon project: CMIP5 @@ -393,8 +393,7 @@ diagnostics: - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, - version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2} scripts: grading: @@ -530,8 +529,7 @@ diagnostics: - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF, project: 
obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} scripts: grading: <<: *grading_settings @@ -568,8 +566,7 @@ diagnostics: - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} scripts: grading: <<: *grading_settings @@ -611,8 +608,7 @@ diagnostics: - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} scripts: grading: <<: *grading_settings @@ -628,7 +624,7 @@ diagnostics: variables: hus: preprocessor: pp400 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 alternative_dataset: ERA-Interim mip: Amon project: CMIP5 @@ -652,10 +648,8 @@ diagnostics: - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} - - {dataset: AIRS, project: obs4MIPs, level: L3, - version: RetStd-v5, tier: 1, start_year: 2003, end_year: 2010} - - {dataset: ERA-Interim, project: OBS6, type: reanaly, - version: 1, tier: 3} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1, start_year: 2003, end_year: 2010} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} scripts: grading: <<: *grading_settings @@ -694,8 +688,7 @@ diagnostics: - {dataset: CIESM, grid: gr, ensemble: r1i1p1f1, project: CMIP6} - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, 
start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} scripts: grading: <<: *grading_settings @@ -744,8 +737,7 @@ diagnostics: - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} - - {dataset: ERA-Interim, project: OBS6, type: reanaly, - version: 1, tier: 3} + - {dataset: ERA-Interim, project: OBS6, type: reanaly, version: 1, tier: 3} - {dataset: NCEP-NCAR-R1, project: OBS6, type: reanaly, version: 1, tier: 2} scripts: grading: @@ -1085,7 +1077,7 @@ diagnostics: variables: pr: <<: *var_settings - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 alternative_dataset: GHCN additional_datasets: - {<<: *cmip3, dataset: bccr_bcm2_0, institute: BCCR} @@ -1113,7 +1105,7 @@ diagnostics: - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} - {dataset: GHCN, project: OBS, type: ground, version: 1, tier: 2} psl_cor: @@ -1175,8 +1167,7 @@ diagnostics: - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - {dataset: MCM-UA-1-0, grid: gn, ensemble: r1i1p1f1, project: CMIP6} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-8, - tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2015} swcre_cor: <<: *corr_diag @@ -1199,8 +1190,7 @@ diagnostics: - {dataset: CanESM5-CanOE, ensemble: r1i1p2f1, grid: gn, project: CMIP6} - {dataset: FGOALS-f3-L, ensemble: r1i1p1f1, grid: gr, project: CMIP6} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-8, - tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, 
tier: 1, start_year: 2001, end_year: 2015} fig_7: title: Collection of pattern correlations diff --git a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml index 652490ca09..a01020e709 100644 --- a/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml +++ b/esmvaltool/recipes/bock20jgr/recipe_bock20jgr_fig_8-10.yml @@ -415,8 +415,7 @@ diagnostics: derive: true additional_datasets: *cmip3_all additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: script: clouds/clouds_ipcc.ncl diff --git a/esmvaltool/recipes/clouds/recipe_clouds_bias.yml b/esmvaltool/recipes/clouds/recipe_clouds_bias.yml index 0d28235431..d424397b22 100644 --- a/esmvaltool/recipes/clouds/recipe_clouds_bias.yml +++ b/esmvaltool/recipes/clouds/recipe_clouds_bias.yml @@ -171,11 +171,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *clim_settings @@ -198,11 +197,10 @@ diagnostics: variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4MIPs, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings diff --git a/esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml b/esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml index 0df31e074e..2fb14b4795 100644 --- 
a/esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml +++ b/esmvaltool/recipes/clouds/recipe_clouds_ipcc.yml @@ -145,8 +145,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: &clim_settings script: clouds/clouds_ipcc.ncl @@ -168,8 +167,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings @@ -188,8 +186,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings diff --git a/esmvaltool/recipes/clouds/recipe_lauer13jclim.yml b/esmvaltool/recipes/clouds/recipe_lauer13jclim.yml index 2a7051b5d5..3d741e485d 100644 --- a/esmvaltool/recipes/clouds/recipe_lauer13jclim.yml +++ b/esmvaltool/recipes/clouds/recipe_lauer13jclim.yml @@ -168,11 +168,10 @@ diagnostics: variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4MIPs, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings @@ -187,11 +186,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, 
version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *clim_settings @@ -211,8 +209,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings @@ -232,8 +229,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *clim_settings @@ -253,11 +249,10 @@ diagnostics: variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4MIPs, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: &taylor_settings script: clouds/clouds_taylor.ncl @@ -314,8 +309,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: UWisc, project: OBS, type: sat, version: v2, - start_year: 1988, end_year: 2007, tier: 3} + - {dataset: UWisc, project: OBS, type: sat, version: v2, start_year: 1988, end_year: 2007, tier: 3} scripts: clim: <<: *taylor_settings @@ -335,8 +329,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *taylor_settings @@ -356,8 +349,7 @@ diagnostics: mip: Amon derive: true 
additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *taylor_settings @@ -372,11 +364,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *taylor_settings @@ -402,8 +393,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: UWisc, project: OBS, type: sat, version: v2, - start_year: 1988, end_year: 2007, tier: 3} + - {dataset: UWisc, project: OBS, type: sat, version: v2, start_year: 1988, end_year: 2007, tier: 3} scripts: clim: &intera_settings script: clouds/clouds_interannual.ncl @@ -435,11 +425,10 @@ diagnostics: variables: clt: preprocessor: clim - reference_dataset: MODIS + reference_dataset: MODIS-1-0 mip: Amon additional_datasets: - - {dataset: MODIS, project: obs4MIPs, level: L3, version: C5, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: MODIS-1-0, project: obs4MIPs, level: L3, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *intera_settings @@ -459,8 +448,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *intera_settings @@ -480,8 +468,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2010, tier: 1} + - {dataset: CERES-EBAF, project: 
obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} scripts: clim: <<: *intera_settings @@ -496,11 +483,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, - start_year: 1986, end_year: 2005, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} scripts: clim: <<: *intera_settings diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml index 24f1814fab..02b9469f51 100644 --- a/esmvaltool/recipes/examples/recipe_check_obs.yml +++ b/esmvaltool/recipes/examples/recipe_check_obs.yml @@ -558,6 +558,16 @@ diagnostics: scripts: null + MOBO-DIC2004-2019: + description: MOBO-DIC2004-2019 check + variables: + dissic: + additional_datasets: + - {dataset: MOBO-DIC2004-2019, project: OBS6, mip: Omon, tier: 2, + type: reanaly, version: '2.3', start_year: 2004, end_year: 2019} + scripts: null + + NCEP-NCAR-R1: description: NCEP-NCAR-R1 check variables: @@ -638,20 +648,45 @@ diagnostics: scripts: null - NOAA-CIRES-20CR: - description: NOAA-CIRES-20CR check + NOAA-CIRES-20CR-V2: + description: NOAA-CIRES-20CR-V2 check variables: clt: clwvi: hus: prw: - rsut: rlut: + rsut: additional_datasets: - - {dataset: NOAA-CIRES-20CR, project: OBS6, mip: Amon, tier: 2, + - {dataset: NOAA-CIRES-20CR-V2, project: OBS6, mip: Amon, tier: 2, type: reanaly, version: v2, start_year: 1871, end_year: 2012} scripts: null + NOAA-MBL-CH4: + description: NOAA marine boundary layer CH4 check + variables: + ch4s: + additional_datasets: + - {dataset: NOAA-MBL-CH4, project: OBS6, mip: Amon, type: atmos, version: 1.0, tier: 2, + start_year: 1983, end_year: 2023} + scripts: null + + NOAA-CIRES-20CR-V3: + description: NOAA-CIRES-20CR-V3 check + variables: + clt: + clwvi: + hus: + prw: + rlut: + rlutcs: + rsut: + rsutcs: + 
additional_datasets: + - {dataset: NOAA-CIRES-20CR-V3, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: v3, start_year: 1836, end_year: 2015} + scripts: null + NOAAGlobalTemp: description: NOAAGlobalTemp check @@ -1601,6 +1636,38 @@ diagnostics: type: sat, version: v1, start_year: 1988, end_year: 2016} scripts: null + MERRA: + description: MERRA check + variables: + cli: + clivi: + clt: + clw: + clwvi: + hur: + hus: + lwp: + pr: + prw: + ps: + psl: + rlut: + rlutcs: + rsdt: + rsut: + rsutcs: + ta: + tas: + ts: + ua: + va: + wap: + zg: + additional_datasets: + - {dataset: MERRA, project: OBS, tier: 3, mip: Amon, + type: reanaly, version: 5.2.0, start_year: 1979, end_year: 2015} + scripts: null + MERRA2: description: MERRA2 check diff --git a/esmvaltool/recipes/examples/recipe_easy_ipcc.yml b/esmvaltool/recipes/examples/recipe_easy_ipcc.yml new file mode 100644 index 0000000000..40040e2835 --- /dev/null +++ b/esmvaltool/recipes/examples/recipe_easy_ipcc.yml @@ -0,0 +1,128 @@ +documentation: + title: Easy IPCC + description: Reproduce part of IPCC AR6 figure 9.3a. 
+ references: + - fox-kemper21ipcc + authors: + - kalverla_peter + - andela_bouwe + maintainer: + - andela_bouwe + +preprocessors: + easy_ipcc: + custom_order: true + anomalies: + period: month + reference: + start_year: 1950 + start_month: 1 + start_day: 1 + end_year: 1979 + end_month: 12 + end_day: 31 + area_statistics: + operator: mean + annual_statistics: + operator: mean + convert_units: + units: 'degrees_C' + ensemble_statistics: + statistics: + - operator: mean + multi_model_statistics: + statistics: + - operator: mean + - operator: percentile + percent: 17 + - operator: percentile + percent: 83 + span: full + keep_input_datasets: false + ignore_scalar_coords: true + +diagnostics: + AR6_Figure_9.3: + variables: + tos_ssp585: + short_name: tos + exp: ['historical', 'ssp585'] + project: CMIP6 + mip: Omon + preprocessor: easy_ipcc + timerange: '1850/2100' + tos_ssp126: + short_name: tos + exp: ['historical', 'ssp126'] + project: CMIP6 + mip: Omon + timerange: '1850/2100' + preprocessor: easy_ipcc + scripts: + Figure_9.3a: + script: examples/make_plot.py + +datasets: + - {dataset: ACCESS-CM2, ensemble: 'r(1:5)i1p1f1', grid: gn} + - {dataset: ACCESS-ESM1-5, ensemble: 'r(1:40)i1p1f1', grid: gn} + - {dataset: AWI-CM-1-1-MR, ensemble: r1i1p1f1, grid: gn} + - {dataset: BCC-CSM2-MR, ensemble: r1i1p1f1, grid: gn} + # - {dataset: CAMS-CSM1-0, ensemble: 'r(1:2)i1p1f1', grid: gn} # available data does not fully cover timerange + - {dataset: CAS-ESM2-0, ensemble: r1i1p1f1, grid: gn} + - {dataset: CAS-ESM2-0, ensemble: r3i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: r4i1p1f1, grid: gn} + - {dataset: CESM2, ensemble: 'r(10:11)i1p1f1', grid: gn} + - {dataset: CESM2-WACCM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CIESM, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-CM2-SR5, ensemble: r1i1p1f1, grid: gn} + - {dataset: CMCC-ESM2, ensemble: r1i1p1f1, grid: gn} + - {dataset: CNRM-CM6-1, ensemble: 'r(1:6)i1p1f2', grid: gn} + - {dataset: CNRM-CM6-1-HR, ensemble: r1i1p1f2, 
grid: gn} + - {dataset: CNRM-ESM2-1, ensemble: 'r(1:5)i1p1f2', grid: gn} + - {dataset: CanESM5, ensemble: 'r(1:25)i1p(1:2)f1', grid: gn} + - {dataset: CanESM5-1, ensemble: 'r1i1p(1:2)f1', grid: gn, institute: CCCma} + - {dataset: CanESM5-CanOE, ensemble: 'r(1:3)i1p2f1', grid: gn} + - {dataset: EC-Earth3, ensemble: r1i1p1f1, grid: gn} + - {dataset: EC-Earth3, ensemble: r4i1p1f1, grid: gn} + - {dataset: EC-Earth3, ensemble: r6i1p1f1, grid: gn} + # - {dataset: EC-Earth3, ensemble: r9i1p1f1, grid: gn} # download failure of ssp585 + - {dataset: EC-Earth3, ensemble: r11i1p1f1, grid: gn} + - {dataset: EC-Earth3, ensemble: r15i1p1f1, grid: gn} + # - {dataset: EC-Earth3, ensemble: 'r(101:150)i1p1f1', grid: gn} # available data does not fully cover timerange + - {dataset: EC-Earth3-Veg, ensemble: 'r(1:4)i1p1f1', grid: gn} + - {dataset: EC-Earth3-Veg, ensemble: r6i1p1f1, grid: gn} + # - {dataset: EC-Earth3-Veg-LR, ensemble: 'r(1:3)i1p1f1', grid: gn} # mismatch between i and j coordinate names between historical and ssp experiment + - {dataset: FGOALS-f3-L, ensemble: 'r(1:3)i1p1f1', grid: gn} + - {dataset: FGOALS-g3, ensemble: 'r(1:4)i1p1f1', grid: gn} + - {dataset: FIO-ESM-2-0, ensemble: 'r(1:3)i1p1f1', grid: gn} + - {dataset: GFDL-ESM4, ensemble: r1i1p1f1, grid: gn} + - {dataset: GISS-E2-1-G, ensemble: 'r(1:4)i1p5f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p5f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-G, ensemble: 'r(1:5)i1p1f2', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p1f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-G, ensemble: 'r(1:5)i1p3f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p3f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-H, ensemble: 'r(1:5)i1p1f2', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: 
piControl, ensemble: r1i1p1f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-1-H, ensemble: 'r(1:5)i1p3f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p3f1}, {short_name: areacello, skip: true}]} + - {dataset: GISS-E2-2-G, ensemble: 'r(1:5)i1p3f1', grid: gn, supplementary_variables: [{short_name: areacella, mip: fx, exp: piControl, ensemble: r1i1p1f1}, {short_name: areacello, skip: true}]} + - {dataset: HadGEM3-GC31-LL, ensemble: r1i1p1f3, grid: gn} + - {dataset: HadGEM3-GC31-MM, ensemble: r1i1p1f3, grid: gn} + # - {dataset: IITM-ESM, ensemble: r1i1p1f1, grid: gn} # available data does not fully cover timerange + - {dataset: INM-CM4-8, ensemble: r1i1p1f1, grid: gr1} + - {dataset: INM-CM5-0, ensemble: r1i1p1f1, grid: gr1} + - {dataset: IPSL-CM6A-LR, ensemble: 'r(1:4)i1p1f1', grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r6i1p1f1, grid: gn} + - {dataset: IPSL-CM6A-LR, ensemble: r14i1p1f1, grid: gn} + # - {dataset: KACE-1-0-G, ensemble: 'r(1:3)i1p1f1', grid: gr} # unstructured grid but no cell area information available + # - {dataset: KIOST-ESM, ensemble: r1i1p1f1, grid: gr1} # historical and ssp126 experiment are on different grids + - {dataset: MCM-UA-1-0, ensemble: r1i1p1f2, grid: gn} + - {dataset: MIROC-ES2H, ensemble: r1i1p4f2, grid: gn} + - {dataset: MIROC-ES2L, ensemble: 'r(1:10)i1p1f2', grid: gn} + - {dataset: MIROC6, ensemble: 'r(1:50)i1p1f1', grid: gn} + - {dataset: MPI-ESM1-2-HR, ensemble: 'r1i1p1f1', grid: gn} + # - {dataset: MPI-ESM1-2-HR, ensemble: 'r(1:2)i1p1f1', grid: gn} # second ensemble member causes warnings about large graphs in `concatenate` preprocessor step + - {dataset: MPI-ESM1-2-LR, ensemble: 'r(1:30)i1p1f1', grid: gn} + - {dataset: MRI-ESM2-0, ensemble: 'r(1:5)i1p1f1', grid: gn} + # - {dataset: NESM3, ensemble: 'r(1:2)i1p1f1', grid: gn} # cannot be used due to https://github.com/ESMValGroup/ESMValCore/issues/2101 + # - {dataset: NorESM2-LM, ensemble: r1i1p1f1, grid: 
gn} # duplicated areacello file with wrong name + - {dataset: NorESM2-MM, ensemble: r1i1p1f1, grid: gn} + - {dataset: TaiESM1, ensemble: r1i1p1f1, grid: gn} + - {dataset: UKESM1-0-LL, ensemble: 'r(1:4)i1p1f2', grid: gn} + - {dataset: UKESM1-0-LL, ensemble: r8i1p1f2, grid: gn} diff --git a/esmvaltool/recipes/examples/recipe_preprocessor_test.yml b/esmvaltool/recipes/examples/recipe_preprocessor_test.yml index df97678ac7..cce4e89df6 100644 --- a/esmvaltool/recipes/examples/recipe_preprocessor_test.yml +++ b/esmvaltool/recipes/examples/recipe_preprocessor_test.yml @@ -75,10 +75,16 @@ preprocessors: multi_model_statistics: false - # Simple ensemble mean and median for multiple models + # Simple ensemble mean, median, and percentiles for multiple models preprocessor_5: ensemble_statistics: - statistics: [mean, median] + statistics: + - mean + - median + - operator: percentile + percent: 5 + - operator: percentile + percent: 95 exclude: [GFDL-ESM2G] # Calculate ensemble means, then multi-model mean diff --git a/esmvaltool/recipes/examples/recipe_python.yml b/esmvaltool/recipes/examples/recipe_python.yml index a6309d826e..d85e1ae437 100644 --- a/esmvaltool/recipes/examples/recipe_python.yml +++ b/esmvaltool/recipes/examples/recipe_python.yml @@ -29,7 +29,7 @@ documentation: datasets: - {dataset: BCC-ESM1, project: CMIP6, exp: historical, ensemble: r1i1p1f1, grid: gn} - - {dataset: bcc-csm1-1, project: CMIP5, exp: historical, ensemble: r1i1p1} + - {dataset: bcc-csm1-1, version: v1, project: CMIP5, exp: historical, ensemble: r1i1p1} preprocessors: # See https://docs.esmvaltool.org/projects/esmvalcore/en/latest/recipe/preprocessor.html diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml index 298b2bbc07..80286e716a 100644 --- a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_92_95.yml @@ 
-267,10 +267,10 @@ diagnostics: variables: pr: preprocessor: clim - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, start_year: 1986, end_year: 2005, tier: 1} - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1986, end_year: 2005} @@ -385,7 +385,7 @@ diagnostics: mip: Amon derive: true additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2010, tier: 1} - {dataset: ACCESS1-0, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1986, end_year: 2005} diff --git a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml index dad979e8aa..e67625b257 100644 --- a/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml +++ b/esmvaltool/recipes/ipccwg1ar5ch9/recipe_flato13ipcc_figures_938_941_cmip6.yml @@ -322,13 +322,13 @@ diagnostics: variables: pr: preprocessor: clima_nomask_pr - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon ensemble: r1i1p1 start_year: 1980 end_year: 2005 additional_datasets: - - {dataset: GPCP-SG, project: OBS, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, tier: 1} # - {dataset: CRU, project: OBS, type: reanaly, version: TS4.02, tier: 2} scripts: clim: diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_basics.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_basics.yml new file mode 100644 index 0000000000..06f8ad3b12 --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_basics.yml @@ -0,0 +1,260 @@ +# ESMValTool +--- +documentation: + title: Basic Model 
Evaluation. + description: > + Show plots of several variables that can be used for basic model + evaluations ("sanity checks"). + authors: + - hassler_birgit + - lauer_axel + - bonnet_pauline + - schlund_manuel + maintainer: + - hassler_birgit + + +# Note: The following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2003/2007' # can be specified, this is just an example + + +preprocessors: + + timeseries_regular: &pp_timeseries_regular + area_statistics: + operator: mean + + timeseries_regular_ann: + <<: *pp_timeseries_regular + annual_statistics: + operator: mean + + timeseries_regular_pr: + <<: *pp_timeseries_regular + convert_units: + units: mm day-1 + + full_climatology: &pp_full_climatology + climate_statistics: + period: full + regrid: + target_grid: 2x2 + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + + full_climatology_pr: + <<: *pp_full_climatology + convert_units: + units: mm day-1 + + zonal_mean: + custom_order: true # makes preprocessor much faster since input for extract_levels is smaller + climate_statistics: + period: full + extract_levels: + levels: {cmor_table: CMIP6, coordinate: plev19} + scheme: linear + coordinate: air_pressure + regrid: + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + target_grid: 2x2 + zonal_statistics: + operator: mean + + +diagnostics: + + # Climatologies - maps (full climatology) + + plot_maps_with_references_tas: + description: Plot climatology maps including reference datasets for tas. 
+ variables: + tas: + <<: *time_period + mip: Amon + preprocessor: full_climatology + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: &plot_multi_dataset_default + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{dataset}_{mip}' + script: monitor/multi_datasets.py + plots: + map: + common_cbar: true + + plot_maps_with_references_pr: + description: Plot climatology maps including reference datasets for pr. + variables: + pr: + <<: *time_period + mip: Amon + preprocessor: full_climatology_pr + additional_datasets: + - {project: OBS, dataset: GPCP-SG, type: atmos, version: 2.3, tier: 2, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + map: + common_cbar: true + plot_kwargs: + default: + cmap: Blues + + # Climatologies (zonal means) + + plot_zonal_mean_profiles_with_references_ta: + description: Plot 2D zonal mean profiles including reference datasets. + variables: + ta: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + + plot_zonal_mean_profiles_with_references_ua: + description: Plot 2D zonal mean profiles including reference datasets. 
+ variables: + ua: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + plot_kwargs: + default: + cmap: Blues + + plot_zonal_mean_profiles_with_references_hus: + description: Plot 2D zonal mean profiles including reference datasets. + variables: + hus: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + zonal_mean_profile: + common_cbar: true + plot_kwargs: + default: + cmap: Blues + + # Time series of global averages (monthly) + + plot_multiple_timeseries: + description: Plot time series including reference datasets. 
+ variables: + tas: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + clt: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: OBS, dataset: ESACCI-CLOUD, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2, reference_for_monitor_diags: true} + rsut: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: OBS, dataset: CERES-EBAF, type: sat, version: Ed4.1, tier: 2, reference_for_monitor_diags: true} + rlut: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + additional_datasets: + - {project: OBS, dataset: CERES-EBAF, type: sat, version: Ed4.1, tier: 2, reference_for_monitor_diags: true} + rtnt: + derive: true + force_derivation: true + mip: Amon + preprocessor: timeseries_regular_ann + timerange: '1995/2014' + prw: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular + # timerange MUST NOT start before 2003 since the observations are not available before 2003 + additional_datasets: + - {project: OBS, dataset: ESACCI-WATERVAPOUR, type: sat, version: CDR2-L3S-05deg_fv3.1, tier: 3, timerange: '2003/2007', reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + group_variables_by: variable_group + script: monitor/multi_datasets.py + plots: + timeseries: + annual_mean_kwargs: false + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + ERA5: + color: black + ESACCI-CLOUD: + color: black + CERES-EBAF: + color: black + ESACCI-WATERVAPOUR: + color: black + + plot_multiple_timeseries_pr: + description: Plot time series including reference datasets. 
+ variables: + pr: + <<: *time_period + mip: Amon + preprocessor: timeseries_regular_pr + additional_datasets: + - {project: OBS, dataset: GPCP-SG, type: atmos, version: 2.3, tier: 2, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + timeseries: + annual_mean_kwargs: false + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + GPCP-SG: + color: black diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_clim.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_clim.yml new file mode 100644 index 0000000000..fd2d08781f --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_clim.yml @@ -0,0 +1,226 @@ +# ESMValTool +--- +documentation: + title: Model evaluation with focus on clouds. + description: > + Plot climatologies several cloud-related variables of multi-year + simulations. + authors: + - bonnet_pauline + - lauer_axel + - hassler_birgit + - schlund_manuel + maintainer: + - lauer_axel + + +# Note: the following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2005/2014' # can be specified, this is just an example + + +preprocessors: + + full_climatology: &full_climatology_diag + climate_statistics: + period: full + regrid: + target_grid: 2x2 + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + + full_climatology_pr: + <<: *full_climatology_diag + convert_units: + units: mm day-1 + + zonal_mean: + custom_order: true # makes preprocessor much faster since input for extract_levels is smaller + climate_statistics: + period: full + extract_levels: + 
levels: {cmor_table: CMIP6, coordinate: plev19} + scheme: linear + coordinate: air_pressure + regrid: + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + target_grid: 2x2 + zonal_statistics: + operator: mean + + +diagnostics: + + plot_clt_maps: + description: Plot clt climatology maps including reference datasets. + variables: + clt: + <<: *time_period + mip: Amon + preprocessor: full_climatology + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2, reference_for_monitor_diags: true} + scripts: + plot: &plot_multi_dataset_default + script: monitor/multi_datasets.py + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{dataset}_{mip}' + plots: + map: + common_cbar: true + + plot_lwcre_maps: + description: Plot lwcre climatology maps including reference datasets. + variables: + lwcre: + <<: *time_period + mip: Amon + preprocessor: full_climatology + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, timerange: '2001/2010', tier: 1, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + + plot_swcre_maps: + description: Plot swcre climatology maps including reference datasets. + variables: + swcre: + <<: *time_period + mip: Amon + preprocessor: full_climatology + derive: true + additional_datasets: + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1, timerange: '2001/2010', reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + + plot_lwp_maps: + description: Plot lwp climatology maps including reference datasets. 
+ variables: + lwp: + <<: *time_period + mip: Amon + preprocessor: full_climatology + derive: true + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + fontsize: 6 + + plot_clivi_maps: + description: Plot clivi climatology maps including reference datasets. + variables: + clivi: + <<: *time_period + mip: Amon + preprocessor: full_climatology + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + + plot_prw_maps: + description: Plot prw climatology maps including reference datasets. + variables: + prw: + <<: *time_period + mip: Amon + preprocessor: full_climatology + additional_datasets: + - {dataset: ESACCI-WATERVAPOUR, project: OBS, type: sat, version: CDR2-L3S-05deg_fv3.1, tier: 3, timerange: '2003/2017', reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + + plot_pr_maps: + description: Plot pr climatology maps including reference datasets. + variables: + pr: + <<: *time_period + mip: Amon + preprocessor: full_climatology_pr + additional_datasets: + - {dataset: GPCP-SG, project: OBS, type: atmos, version: 2.3, tier: 2, + timerange: '2003/2017', reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + map: + common_cbar: true + + plot_clw_profiles: + description: Plot clw vertical profiles including reference datasets.
+ variables: + clw: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {dataset: CLOUDSAT-L2, project: OBS, type: sat, version: P1-R05-gridbox-average-noprecip, timerange: '2006/2017', tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + profile: + common_cbar: true + + plot_cli_profiles: + description: Plot cli vertical profiles including reference datasets. + variables: + cli: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {dataset: CALIPSO-ICECLOUD, project: OBS, type: sat, version: 1-00, timerange: '2007/2015', tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + profile: + common_cbar: true + + plot_cl_profiles: + description: Plot cl vertical profiles including reference datasets. + variables: + cl: + <<: *time_period + mip: Amon + preprocessor: zonal_mean + additional_datasets: + - {project: native6, dataset: ERA5, type: reanaly, version: v1, tier: 3, reference_for_monitor_diags: true} + scripts: + plot: + <<: *plot_multi_dataset_default + plots: + profile: + common_cbar: true diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_cycles.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_cycles.yml new file mode 100644 index 0000000000..ed52fd7d3c --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_clouds_cycles.yml @@ -0,0 +1,178 @@ +# ESMValTool +--- +documentation: + title: Model evaluation with focus on clouds. + description: > + Plot annual cycles of several cloud-related variables of multi-year simulations. 
+ authors: + - lauer_axel + - schlund_manuel + maintainer: + - lauer_axel + + +# Note: the following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2000/2014' # can be specified, this is just an example + + +preprocessors: + + pp_global: &global_settings + area_statistics: + operator: mean + climate_statistics: + period: month + + pp_SEPacific: + <<: *global_settings + extract_region: + start_longitude: 265 + end_longitude: 275 + start_latitude: -25 + end_latitude: -5 + mask_landsea: + mask_out: land + + pp_SouthernOcean: + <<: *global_settings + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: -65 + end_latitude: -30 + mask_landsea: + mask_out: land + + pp_StormTracks: + <<: *global_settings + extract_region: + start_longitude: 0 + end_longitude: 360 + start_latitude: 45 + end_latitude: 60 + + +diagnostics: + + anncyc: + description: Plot annual cycles including reference datasets. 
+ variables: + clt_global: &clt_settings + <<: *time_period + preprocessor: pp_global + short_name: clt + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} + clt_tropics: + <<: *clt_settings + clt_sepacific: + <<: *clt_settings + preprocessor: pp_SEPacific + clt_southerocean: + <<: *clt_settings + preprocessor: pp_SouthernOcean + clt_stormtracks: + <<: *clt_settings + preprocessor: pp_StormTracks + clivi_global: &clivi_settings + <<: *time_period + preprocessor: pp_global + short_name: clivi + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} + clivi_tropics: + <<: *clivi_settings + clivi_sepacific: + <<: *clivi_settings + preprocessor: pp_SEPacific + clivi_southerocean: + <<: *clivi_settings + preprocessor: pp_SouthernOcean + clivi_stormtracks: + <<: *clivi_settings + preprocessor: pp_StormTracks + lwp_global: &lwp_settings + <<: *time_period + preprocessor: pp_global + short_name: lwp + derive: true + mip: Amon + additional_datasets: + - {dataset: ESACCI-CLOUD, project: OBS, type: sat, version: AVHRR-AMPM-fv3.0, tier: 2} + lwp_tropics: + <<: *lwp_settings + lwp_sepacific: + <<: *lwp_settings + preprocessor: pp_SEPacific + lwp_southerocean: + <<: *lwp_settings + preprocessor: pp_SouthernOcean + lwp_stormtracks: + <<: *lwp_settings + preprocessor: pp_StormTracks + swcre_global: &swcre_settings + <<: *time_period + preprocessor: pp_global + short_name: swcre + derive: true + mip: Amon + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, tier: 2} + swcre_tropics: + <<: *swcre_settings + swcre_sepacific: + <<: *swcre_settings + preprocessor: pp_SEPacific + swcre_southerocean: + <<: *swcre_settings + preprocessor: pp_SouthernOcean + swcre_stormtracks: + <<: *swcre_settings + preprocessor: pp_StormTracks + lwcre_global: &lwcre_settings + <<: *time_period + preprocessor: pp_global + 
short_name: lwcre + derive: true + mip: Amon + additional_datasets: + - {dataset: CERES-EBAF, project: OBS, type: sat, version: Ed4.1, tier: 2} + lwcre_tropics: + <<: *lwcre_settings + lwcre_sepacific: + <<: *lwcre_settings + preprocessor: pp_SEPacific + lwcre_southerocean: + <<: *lwcre_settings + preprocessor: pp_SouthernOcean + lwcre_stormtracks: + <<: *lwcre_settings + preprocessor: pp_StormTracks + scripts: + allplots: + script: monitor/multi_datasets.py + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{mip}' + group_variables_by: variable_group + plots: + annual_cycle: + legend_kwargs: + loc: upper right + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + ESACCI-CLOUD: + color: black + pyplot_kwargs: + title: '{short_name}' diff --git a/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_precip_zonal.yml b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_precip_zonal.yml new file mode 100644 index 0000000000..6bd1231046 --- /dev/null +++ b/esmvaltool/recipes/model_evaluation/recipe_model_evaluation_precip_zonal.yml @@ -0,0 +1,72 @@ +# ESMValTool +--- +documentation: + title: Model evaluation with focus on precipitation. + description: > + Plot zonal mean plots of precipitation. 
+ authors: + - lauer_axel + - schlund_manuel + maintainer: + - lauer_axel + + +# Note: the following models are just examples +datasets: + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn} + +# Note: for some observational datasets, we use preset time ranges due to +# their limited temporal availability +timerange_for_models: &time_period + timerange: '2000/2014' # can be specified, this is just an example + + +preprocessors: + + pp_zonal: + regrid: + target_grid: 2x2 + scheme: + reference: esmf_regrid.schemes:ESMFAreaWeighted + zonal_statistics: + operator: mean + climate_statistics: + operator: mean + period: full + convert_units: + units: mm day-1 + + +diagnostics: + + zonal: + description: Plot zonal mean plots including reference datasets. + variables: + pr: + <<: *time_period + preprocessor: pp_zonal + mip: Amon + additional_datasets: + - {dataset: ERA5, project: native6, type: reanaly, version: 'v1', tier: 3} + - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, tier: 1} + scripts: + allplots: + script: monitor/multi_datasets.py + plot_folder: '{plot_dir}' + plot_filename: '{plot_type}_{real_name}_{mip}' + group_variables_by: variable_group + plots: + variable_vs_lat: + legend_kwargs: + loc: upper right + plot_kwargs: + MPI-ESM1-2-HR: + color: C0 + MPI-ESM1-2-LR: + color: C1 + ERA5: + color: black + linestyle: dotted + GPCP-SG: + color: black diff --git a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml index 0d1415979a..681277310c 100644 --- a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml +++ b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml @@ -1,12 +1,17 @@ # ESMValTool --- documentation: - title: Model Monitoring + title: Example recipe for model monitoring with reference datasets.
description: | Show plots that include multiple datasets that can be used to monitor (ongoing) model simulations. authors: - schlund_manuel + - heuer_helge + - kraft_jeremy + - kuehbacher_birgit + - ruhe_lukas + - sarauer_ellen - winterstein_franziska maintainer: - schlund_manuel @@ -14,8 +19,8 @@ documentation: datasets: # Note: plot_label currently only used by diagnostic plot_multiple_annual_cycles - - {project: CMIP6, dataset: EC-Earth3, exp: historical, ensemble: r1i1p1f1, grid: gr, plot_label: 'EC-Earth3 historical'} - - {project: CMIP6, dataset: CanESM5, exp: historical, ensemble: r1i1p1f1, grid: gn, plot_label: 'Reference (CanESM5 historical)', reference_for_monitor_diags: true} + - {project: CMIP6, dataset: MPI-ESM1-2-HR, exp: historical, ensemble: r1i1p1f1, grid: gn, plot_label: 'MPI-ESM1-2-HR historical'} + - {project: CMIP6, dataset: MPI-ESM1-2-LR, exp: historical, ensemble: r1i1p1f1, grid: gn, plot_label: 'Reference (MPI-ESM1-2-LR historical)', reference_for_monitor_diags: true} preprocessors: @@ -67,6 +72,36 @@ preprocessors: scheme: linear coordinate: air_pressure + var_vs_lat: + climate_statistics: + operator: mean + regrid: + target_grid: 2x2 + scheme: linear + zonal_statistics: + operator: mean + convert_units: + units: mm day-1 + + global_mean_extract_levels: + custom_order: true + extract_levels: + levels: {cmor_table: CMIP6, coordinate: alt16} + scheme: linear + coordinate: altitude + regrid: + target_grid: 2x2 + scheme: linear + area_statistics: + operator: mean + + zonal_mean_2d: + regrid: + target_grid: 2x2 + scheme: linear + zonal_statistics: + operator: mean + diagnostics: @@ -87,9 +122,9 @@ diagnostics: annual_mean_kwargs: linestyle: '--' plot_kwargs: - EC-Earth3: # = dataset since 'facet_used_for_labels' is 'dataset' by default + MPI-ESM1-2-HR: # = dataset since 'facet_used_for_labels' is 'dataset' by default color: C0 - CanESM5: + MPI-ESM1-2-LR: color: black plot_multiple_annual_cycles: @@ -108,9 +143,9 @@ diagnostics: legend_kwargs: 
loc: upper right plot_kwargs: - 'EC-Earth3 historical': # = plot_label since 'facet_used_for_labels: plot_label' + 'MPI-ESM1-2-HR historical': # = plot_label since 'facet_used_for_labels: plot_label' color: C0 - 'Reference (CanESM5 historical)': + 'Reference (MPI-ESM1-2-LR historical)': color: black pyplot_kwargs: title: Near-Surface Air Temperature on Northern Hemisphere @@ -150,6 +185,7 @@ diagnostics: plot_kwargs_bias: levels: [-10.0, -7.5, -5.0, -2.5, 0.0, 2.5, 5.0, 7.5, 10.0] + plot_1D_profiles_with_references: description: Plot 1D profiles including reference datasets. variables: @@ -164,7 +200,58 @@ diagnostics: plots: 1d_profile: plot_kwargs: - EC-Earth3: # = dataset since 'facet_used_for_labels' is 'dataset' by default + MPI-ESM1-2-HR: # = dataset since 'facet_used_for_labels' is 'dataset' by default color: C0 - CanESM5: + MPI-ESM1-2-LR: color: black + + plot_variable_vs_latitude: + description: Creates a single-panel variable plot over latitude. + variables: + pr: + preprocessor: var_vs_lat + mip: Amon + timerange: '20000101/20030101' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + variable_vs_lat: + + plot_hovmoeller_z_vs_time: + description: Plot Hovmoeller Z vs. time including reference datasets. + variables: + ta: + preprocessor: global_mean_extract_levels + mip: Amon + timerange: '2000/2004' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + hovmoeller_z_vs_time: + plot_func: contourf + common_cbar: true + time_format: '%Y' + log_y: false + pyplot_kwargs: + ylim: [0, 20000] + + plot_time_vs_lat_with_references: + description: Plot Hovmoeller time vs. latitude including reference datasets. 
+ variables: + tas: + mip: Amon + preprocessor: zonal_mean_2d + timerange: '2000/2004' + scripts: + plot: + <<: *plot_multi_dataset_default + script: monitor/multi_datasets.py + plots: + hovmoeller_time_vs_lat_or_lon: + common_cbar: true + show_x_minor_ticks: false + time_format: '%Y' diff --git a/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml b/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml index a5491cc0b9..061a6a6dfa 100644 --- a/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml +++ b/esmvaltool/recipes/recipe_autoassess_landsurface_soilmoisture.yml @@ -4,9 +4,6 @@ documentation: description: | Recipe that runs the Autoassess Land-surface assessment area diagnostic. - Climatological files are stored externally to avoid overloading the - ESMValTool source. See /gws/nopw/j04/esmeval/autoassess_specific_files - (on JASMIN). authors: - predoi_valeriu @@ -27,10 +24,11 @@ datasets: - {dataset: E3SM-1-0, project: CMIP6, exp: historical, grid: gr, ensemble: r1i1p1f1, start_year: 1992, end_year: 2002} preprocessors: - pp_aa_area: - regrid: # NOT USED - target_grid: 0.15x0.15 - scheme: linear + seasonal: + climate_statistics: + operator: mean + period: seasonal + seasons: ['DJF', 'MAM', 'JJA', 'SON'] diagnostics: aa_landsurf_soilmoisture: @@ -38,17 +36,20 @@ diagnostics: variables: mrsos: # moisture_content_of_soil_layer mip: Lmon + preprocessor: seasonal + sm: # Volumetric Moisture in Upper Portion of Soil Column + mip: Lmon + project: CMIP5 + derive: true + preprocessor: seasonal + additional_datasets: + - {dataset: ESACCI-SOILMOISTURE, project: OBS, type: sat, version: L3S-SSMV-COMBINED-v4.2, tier: 2, start_year: 1999, end_year: 2008} scripts: autoassess_landsurf_soilmoisture: &autoassess_landsurf_soilmoisture_settings - script: autoassess/autoassess_area_base.py - title: "Autoassess Land-Surface Soilmoisture Diagnostic" + script: autoassess/land_surface_soilmoisture/soilmoisture.py area: land_surface_soilmoisture 
control_model: ACCESS-CM2 exp_model: E3SM-1-0 - obs_models: [] - start: 1993/12/01 - end: 2002/12/01 - climfiles_root: '/gws/nopw/j04/esmeval/autoassess_specific_files/files' # on JASMIN plot_standard: description: Wrapper to collect and plot previously calculated metrics diff --git a/esmvaltool/recipes/recipe_ecs_constraints.yml b/esmvaltool/recipes/recipe_ecs_constraints.yml index 3655503e8a..0abc4f20e6 100644 --- a/esmvaltool/recipes/recipe_ecs_constraints.yml +++ b/esmvaltool/recipes/recipe_ecs_constraints.yml @@ -522,9 +522,9 @@ diagnostics: mip: Amon start_year: 1986 end_year: 2005 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -593,9 +593,9 @@ diagnostics: mip: Amon start_year: 2003 end_year: 2005 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, version: RetStd-v5, tier: 1} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -755,7 +755,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} rsutcs: <<: *var_settings_cmip5 preprocessor: default @@ -764,7 +764,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} rsdt: <<: *var_settings_cmip5 preprocessor: default @@ -773,7 +773,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: 
L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -1175,9 +1175,9 @@ diagnostics: exp: [historical, rcp85] start_year: 2005 end_year: 2010 - reference_dataset: 'AIRS-2-0|MLS-AURA' + reference_dataset: AIRS-2-0|MLS-AURA additional_datasets: - - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, version: v2, tier: 1} + - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, tier: 1} - {dataset: MLS-AURA, project: OBS6, type: sat, version: '004', tier: 3} additional_datasets: - {dataset: ACCESS1-0} diff --git a/esmvaltool/recipes/recipe_ecs_scatter.yml b/esmvaltool/recipes/recipe_ecs_scatter.yml index a052caf1c4..eae1034e42 100644 --- a/esmvaltool/recipes/recipe_ecs_scatter.yml +++ b/esmvaltool/recipes/recipe_ecs_scatter.yml @@ -126,14 +126,14 @@ diagnostics: - atmos variables: pr: - reference_dataset: TRMM-L3 + reference_dataset: TRMM mip: Amon exp: historical start_year: 1986 end_year: 2005 project: CMIP5 additional_datasets: - - {dataset: TRMM-L3, project: obs4MIPs, level: v7, version: 7A, + - {dataset: TRMM, project: obs4MIPs, level: v7, start_year: 1998, end_year: 2013, tier: 1} - {dataset: ACCESS1-0, ensemble: r1i1p1} # - {dataset: ACCESS1-3, ensemble: r1i1p1} @@ -162,11 +162,11 @@ diagnostics: - {dataset: MRI-CGCM3, ensemble: r1i1p1} - {dataset: NorESM1-M, ensemble: r1i1p1} prStderr: - reference_dataset: TRMM-L3 + reference_dataset: TRMM mip: Amon frequency: mon additional_datasets: - - {dataset: TRMM-L3, project: obs4MIPs, level: v7, version: 7A, + - {dataset: TRMM, project: obs4MIPs, level: v7, start_year: 1998, end_year: 2013, tier: 1} scripts: ecs_scatter: @@ -188,14 +188,14 @@ diagnostics: variables: hus: preprocessor: pp500 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 mip: Amon exp: historical start_year: 1986 end_year: 2005 project: CMIP5 additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, version: RetStd-v5, + - 
{dataset: AIRS-2-1, project: obs4MIPs, level: L3, start_year: 2003, end_year: 2010, tier: 1} - {dataset: ACCESS1-0, ensemble: r1i1p1} # - {dataset: ACCESS1-3, ensemble: r1i1p1} @@ -225,11 +225,11 @@ diagnostics: - {dataset: NorESM1-M, ensemble: r1i1p1} husStderr: preprocessor: pp500 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 mip: Amon frequency: mon additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, version: RetStd-v5, + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, start_year: 2003, end_year: 2010, tier: 1} scripts: ecs_scatter: @@ -396,24 +396,24 @@ diagnostics: exp: historical project: CMIP5 additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2005, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2005, + tier: 1} rsutcs: reference_dataset: CERES-EBAF mip: Amon exp: historical project: CMIP5 additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2005, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2005, + tier: 1} rsdt: reference_dataset: CERES-EBAF mip: Amon exp: historical project: CMIP5 additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, - start_year: 2001, end_year: 2005, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, start_year: 2001, end_year: 2005, + tier: 1} scripts: ecs_scatter: diag: covrefl @@ -433,15 +433,14 @@ diagnostics: - atmos variables: pr: - reference_dataset: TRMM-L3 + reference_dataset: TRMM start_year: 1998 end_year: 2013 exp: historical mip: Amon project: CMIP6 additional_datasets: - - {dataset: TRMM-L3, project: obs4MIPs, level: v7, - version: 7A, tier: 1} + - {dataset: TRMM, project: obs4MIPs, level: v7, tier: 1} - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} - {dataset: BCC-ESM1, grid: gn, ensemble: 
r1i1p1f1} - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} @@ -457,14 +456,13 @@ diagnostics: - {dataset: MIROC6, grid: gn, ensemble: r1i1p1f1} - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} prStderr: - reference_dataset: TRMM-L3 + reference_dataset: TRMM mip: Amon frequency: mon start_year: 1998 end_year: 2013 additional_datasets: - - {dataset: TRMM-L3, project: obs4MIPs, level: v7, - version: 7A, tier: 1} + - {dataset: TRMM, project: obs4MIPs, level: v7, tier: 1} scripts: ecs_scatter: diag: itczidx @@ -484,15 +482,14 @@ diagnostics: variables: hus: preprocessor: pp500 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 start_year: 2003 end_year: 2010 exp: historical mip: Amon project: CMIP6 additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, - version: RetStd-v5, tier: 1} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} - {dataset: BCC-ESM1, grid: gn, ensemble: r1i1p1f1} - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} @@ -509,14 +506,13 @@ diagnostics: - {dataset: MRI-ESM2-0, grid: gn, ensemble: r1i1p1f1} husStderr: preprocessor: pp500 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 mip: Amon frequency: mon start_year: 2003 end_year: 2010 additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, - version: RetStd-v5, tier: 1} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} scripts: ecs_scatter: diag: humidx @@ -689,8 +685,7 @@ diagnostics: mip: Amon project: CMIP6 additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} @@ -713,8 +708,7 @@ diagnostics: mip: Amon project: CMIP6 additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, 
level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} @@ -737,8 +731,7 @@ diagnostics: mip: Amon project: CMIP6 additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} - {dataset: BCC-CSM2-MR, grid: gn, ensemble: r1i1p1f1} - {dataset: BCC-ESM1, grid: gn, ensemble: r2i1p1f1} # - {dataset: CAMS-CSM1-0, grid: gn, ensemble: r1i1p1f1} diff --git a/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml b/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml index 6481e33971..aae6491103 100644 --- a/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml +++ b/esmvaltool/recipes/recipe_heatwaves_coldwaves.yml @@ -27,7 +27,7 @@ datasets: # - {dataset: IPSL-CM5A-MR, type: exp, project: CMIP5, exp: rcp85, # ensemble: r1i1p1, start_year: 2020, end_year: 2040} - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: historical, - ensemble: r1i1p1, start_year: 1971, end_year: 2000} + ensemble: r1i1p1, start_year: 1971, end_year: 2000, version: v1} - {dataset: bcc-csm1-1, type: exp, project: CMIP5, exp: rcp85, ensemble: r1i1p1, start_year: 2060, end_year: 2080} diff --git a/esmvaltool/recipes/recipe_iht_toa.yml b/esmvaltool/recipes/recipe_iht_toa.yml new file mode 100644 index 0000000000..19e05f5ac2 --- /dev/null +++ b/esmvaltool/recipes/recipe_iht_toa.yml @@ -0,0 +1,119 @@ +# ESMValTool +# recipe_transport.yml +--- +documentation: + title: Implied heat transport + + description: | + Horizontal implied heat transport of the entire column derived from + TOA radiative fluxes. + +# The 'single_model' script produces Figures 1 to 6 in Pearce and +# Bodas-Salcedo (2023) for each of the datasets chosen. 
+ + authors: + - pearce_francesca + - bodas-salcedo_alejandro + + maintainer: + - bodas-salcedo_alejandro + + references: + - pearce23jclim + +datasets: + # The CERES-EBAF observations reproduce the results of Pearce and + # Bodas-Salcedo (2023), but it can be applied to model data. + - {dataset: CERES-EBAF, project: obs4MIPs, tier: 1, start_year: 2001, end_year: 2010} + # - {dataset: HadGEM3-GC31-LL, project: CMIP6, exp: amip, ensemble: r1i1p1f3, grid: gn, mip: Amon, start_year: 1990, end_year: 2005} + +preprocessors: + # It is recommended to use a low resolution grid as the + # calculations are computationally expensive + common_grid: + regrid: + target_grid: 5.0x5.0 + scheme: area_weighted + + climate_mean_global: + regrid: + target_grid: 5.0x5.0 + scheme: area_weighted + climate_statistics: + operator: mean + +diagnostics: + + implied_heat_transport: + title: TOA implied heat transports + description: Energy flux potential and implied heat transports for TOA radiative fluxes. + themes: + - phys + realms: + - atmos + variables: + rtnt: + derive: true + short_name: rtnt + mip: Amon + preprocessor: climate_mean_global + rtnt_monthly: + derive: true + short_name: rtnt + mip: Amon + preprocessor: common_grid + rsnt: + derive: true + short_name: rsnt + mip: Amon + preprocessor: climate_mean_global + rlut: + short_name: rlut + mip: Amon + preprocessor: climate_mean_global + rlutcs: + short_name: rlutcs + mip: Amon + preprocessor: climate_mean_global + rlutcs_monthly: + short_name: rlutcs + mip: Amon + preprocessor: common_grid + rsutcs: + short_name: rsutcs + mip: Amon + preprocessor: climate_mean_global + rsutcs_monthly: + short_name: rsutcs + mip: Amon + preprocessor: common_grid + rsut: + short_name: rsut + mip: Amon + preprocessor: climate_mean_global + rsdt: + short_name: rsdt + mip: Amon + preprocessor: climate_mean_global + rsdt_monthly: + short_name: rsdt + mip: Amon + preprocessor: common_grid + netcre: + derive: true + short_name: netcre + mip: Amon + 
preprocessor: climate_mean_global + swcre: + derive: true + short_name: swcre + mip: Amon + preprocessor: climate_mean_global + lwcre: + derive: true + short_name: lwcre + mip: Amon + preprocessor: climate_mean_global + scripts: + single_model: + script: iht_toa/single_model_diagnostics.py diff --git a/esmvaltool/recipes/recipe_kcs.yml b/esmvaltool/recipes/recipe_kcs.yml index cbb7e85592..6eebb2c0e6 100644 --- a/esmvaltool/recipes/recipe_kcs.yml +++ b/esmvaltool/recipes/recipe_kcs.yml @@ -64,7 +64,11 @@ preprocessors: standardize: false multi_model_statistics: span: full - statistics: [p10, p90] + statistics: + - operator: percentile + percent: 10 + - operator: percentile + percent: 90 preprocessor_local: &extract_NL extract_point: longitude: 6.25 @@ -91,7 +95,7 @@ diagnostics: global_matching: script: kcs/global_matching.py scenario_years: [2050, 2085] - scenario_percentiles: [P10, P90] + scenario_percentiles: [Percentile10, Percentile90] local_resampling: description: > diff --git a/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml b/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml index ee7c8e96e1..f1ff96eb64 100644 --- a/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml +++ b/esmvaltool/recipes/recipe_perfmetrics_CMIP5.yml @@ -613,7 +613,7 @@ diagnostics: variables: pr: preprocessor: ppNOLEV1 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon project: CMIP5 exp: historical @@ -670,8 +670,7 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, - version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} scripts: grading: <<: *grading_settings @@ -811,8 +810,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: latlon: <<: *latlon_settings @@ -883,8 
+881,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: latlon: <<: *latlon_settings @@ -958,8 +955,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings @@ -975,7 +971,7 @@ diagnostics: variables: hus: preprocessor: pp400 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 alternative_dataset: ERA-Interim mip: Amon project: CMIP5 @@ -1024,10 +1020,9 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: AIRS, project: obs4MIPs, level: L3, - version: RetStd-v5, tier: 1} - - {dataset: ERA-Interim, project: OBS6, type: reanaly, - version: 1, tier: 3} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + - {dataset: ERA-Interim, project: OBS6, + type: reanaly, version: 1, tier: 3} scripts: grading: <<: *grading_settings @@ -1090,8 +1085,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings diff --git a/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml b/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml index b9e6b1ad1e..6e80cef108 100644 --- a/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml +++ b/esmvaltool/recipes/recipe_perfmetrics_CMIP5_4cds.yml @@ -1025,7 +1025,7 @@ diagnostics: variables: hus: preprocessor: pp400 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 alternative_dataset: ERA-Interim mip: Amon project: CMIP5 @@ -1074,9 +1074,9 @@ diagnostics: - 
{dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: AIRS, project: obs4MIPs, level: L3, version: RetStd-v5, tier: 1} - - {dataset: ERA-Interim, project: OBS6, type: reanaly, - version: 1, tier: 3} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} + - {dataset: ERA-Interim, project: OBS6, + type: reanaly, version: 1, tier: 3} scripts: grading: <<: *grading_settings @@ -1271,7 +1271,7 @@ diagnostics: variables: pr: preprocessor: ppNOLEV1 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 mip: Amon project: CMIP5 exp: historical @@ -1328,7 +1328,7 @@ diagnostics: - {dataset: MRI-ESM1} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} scripts: grading: <<: *grading_settings @@ -1477,7 +1477,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings @@ -1544,7 +1544,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} scripts: grading: <<: *grading_settings diff --git a/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml b/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml index 834b97e158..11ed80bbaa 100644 --- a/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml +++ b/esmvaltool/recipes/recipe_perfmetrics_land_CMIP5.yml @@ -420,7 +420,7 @@ diagnostics: rlus: short_name: rlus preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + reference_dataset: CERES-EBAF_Surface mip: Amon project: CMIP5 exp: historical @@ -460,9 +460,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} 
- {dataset: NorESM1-ME} - - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} scripts: grading: <<: *grading_settings @@ -478,7 +476,7 @@ diagnostics: rlds: short_name: rlds preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + reference_dataset: CERES-EBAF_Surface mip: Amon project: CMIP5 exp: historical @@ -519,9 +517,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} scripts: grading: <<: *grading_settings @@ -537,7 +533,7 @@ diagnostics: rsus: short_name: rsus preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + reference_dataset: CERES-EBAF_Surface mip: Amon project: CMIP5 exp: historical @@ -577,9 +573,7 @@ diagnostics: - {dataset: MRI-CGCM3} - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} scripts: grading: <<: *grading_settings @@ -595,7 +589,7 @@ diagnostics: rsds: short_name: rsds preprocessor: ppNOLEV1 - reference_dataset: CERES-EBAF + reference_dataset: CERES-EBAF_Surface mip: Amon project: CMIP5 exp: historical @@ -637,8 +631,7 @@ diagnostics: - {dataset: NorESM1-M} - {dataset: NorESM1-ME} - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, - version: Ed2-8, tier: 1, start_year: 2001, end_year: 2015} + - {dataset: CERES-EBAF_Surface, project: obs4MIPs, level: L3B, tier: 1, start_year: 2001, end_year: 2013} scripts: grading: <<: *grading_settings diff --git 
a/esmvaltool/recipes/recipe_psyplot.yml b/esmvaltool/recipes/recipe_psyplot.yml index 4a07f4c558..c34ee203f0 100644 --- a/esmvaltool/recipes/recipe_psyplot.yml +++ b/esmvaltool/recipes/recipe_psyplot.yml @@ -2,13 +2,13 @@ # recipe_psyplot.yml --- documentation: - title: > - Create arbitrary Psyplot plots. + title: Example recipe for the Psyplot diagnostic. description: > This recipes showcases the use of the Psyplot diagnostic that provides a high-level interface to Psyplot for ESMValTool recipes. For each input - dataset, an individual plot is created. + dataset, an individual plot is created. With the Psyplot diagnostic, + arbitrary Psyplot plots can be created. authors: - schlund_manuel diff --git a/esmvaltool/recipes/recipe_quantilebias.yml b/esmvaltool/recipes/recipe_quantilebias.yml index 57eee9d05e..2da81b5960 100644 --- a/esmvaltool/recipes/recipe_quantilebias.yml +++ b/esmvaltool/recipes/recipe_quantilebias.yml @@ -20,8 +20,8 @@ documentation: - c3s-magic datasets: - - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1979, end_year: 2005} - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.3, tier: 1, start_year: 1979, end_year: 2005} + - {dataset: CanESM2, project: CMIP5, exp: historical, ensemble: r1i1p1, start_year: 1979, end_year: 2005} + - {dataset: GPCP-V2.3, project: obs4MIPs, level: L3, tier: 1, start_year: 1979, end_year: 2005} preprocessors: mask_regrid_preproc: @@ -37,7 +37,7 @@ diagnostics: variables: pr: preprocessor: mask_regrid_preproc - reference_dataset: "GPCP-SG" + reference_dataset: "GPCP-V2.3" mip: Amon scripts: diff --git a/esmvaltool/recipes/recipe_radiation_budget.yml b/esmvaltool/recipes/recipe_radiation_budget.yml index 2f56cb4f9c..4a81307283 100644 --- a/esmvaltool/recipes/recipe_radiation_budget.yml +++ b/esmvaltool/recipes/recipe_radiation_budget.yml @@ -52,13 +52,13 @@ diagnostics: mip: Amon preprocessor: single_value additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, 
level: L3B, version: Ed2-7, + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, start_year: 2000, end_year: 2010, tier: 1} rsutcs: mip: Amon preprocessor: single_value additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, start_year: 2000, end_year: 2010, tier: 1} rsds: mip: Amon @@ -70,13 +70,13 @@ diagnostics: mip: Amon preprocessor: single_value additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, start_year: 2000, end_year: 2010, tier: 1} rlutcs: mip: Amon preprocessor: single_value additional_datasets: - - {dataset: CERES-EBAF, project: obs4mips, level: L3B, version: Ed2-7, + - {dataset: CERES-EBAF, project: obs4mips, level: L3B, start_year: 2000, end_year: 2010, tier: 1} rlds: mip: Amon diff --git a/esmvaltool/recipes/recipe_schlund20esd.yml b/esmvaltool/recipes/recipe_schlund20esd.yml index 28d9e83375..041ae07e41 100644 --- a/esmvaltool/recipes/recipe_schlund20esd.yml +++ b/esmvaltool/recipes/recipe_schlund20esd.yml @@ -858,9 +858,9 @@ diagnostics: mip: Amon start_year: 1986 end_year: 2005 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -919,9 +919,9 @@ diagnostics: mip: Amon start_year: 1986 end_year: 2005 - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} additional_datasets: - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} @@ -994,9 +994,9 @@ diagnostics: mip: Amon 
start_year: 2003 end_year: 2005 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, version: RetStd-v5, tier: 1} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -1055,9 +1055,9 @@ diagnostics: mip: Amon start_year: 2003 end_year: 2005 - reference_dataset: AIRS + reference_dataset: AIRS-2-1 additional_datasets: - - {dataset: AIRS, project: obs4MIPs, level: L3, version: RetStd-v5, tier: 1} + - {dataset: AIRS-2-1, project: obs4MIPs, level: L3, tier: 1} additional_datasets: - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} - {dataset: ACCESS-ESM1-5, ensemble: r1i1p1f1, grid: gn} @@ -1283,7 +1283,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} rsutcs: <<: *var_settings_cmip5 preprocessor: default @@ -1292,7 +1292,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} rsdt: <<: *var_settings_cmip5 preprocessor: default @@ -1301,7 +1301,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} additional_datasets: - {dataset: ACCESS1-0} - {dataset: ACCESS1-3} @@ -1380,7 +1380,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} rsutcs: <<: *var_settings_cmip6 preprocessor: default @@ 
-1389,7 +1389,7 @@ diagnostics: end_year: 2005 reference_dataset: CERES-EBAF additional_datasets: - - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, version: Ed2-7, tier: 1} + - {dataset: CERES-EBAF, project: obs4MIPs, level: L3B, tier: 1} rsdt: <<: *var_settings_cmip6 preprocessor: default @@ -1885,7 +1885,7 @@ diagnostics: end_year: 2010 reference_dataset: 'AIRS-2-0|MLS-AURA' additional_datasets: - - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, version: v2, tier: 1} + - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, tier: 1} - {dataset: MLS-AURA, project: OBS6, type: sat, version: '004', tier: 3} additional_datasets: - {dataset: ACCESS1-0} @@ -1940,7 +1940,7 @@ diagnostics: end_year: 2010 reference_dataset: 'AIRS-2-0|MLS-AURA' additional_datasets: - - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, version: v2, tier: 1} + - {dataset: AIRS-2-0, project: obs4MIPs, level: L3, tier: 1} - {dataset: MLS-AURA, project: OBS6, type: sat, version: '004', tier: 3} additional_datasets: - {dataset: ACCESS-CM2, ensemble: r1i1p1f1, grid: gn, institute: CSIRO-ARCCSS} diff --git a/esmvaltool/recipes/recipe_seaborn.yml b/esmvaltool/recipes/recipe_seaborn.yml index faf0f07085..983efae0be 100644 --- a/esmvaltool/recipes/recipe_seaborn.yml +++ b/esmvaltool/recipes/recipe_seaborn.yml @@ -2,14 +2,14 @@ # recipe_seaborn.yml --- documentation: - title: > - Create arbitrary Seaborn plots. + title: Example recipe for the Seaborn diagnostic. description: > This recipe showcases the use of the Seaborn diagnostic that provides a high-level interface to Seaborn for ESMValTool recipes. For this, the input data is arranged into a single `pandas.DataFrame`, which is then used as - input for the Seaborn function defined by the option `seaborn_func`. + input for the Seaborn function defined by the option `seaborn_func`. With + the Seaborn diagnostic, arbitrary Seaborn plots can be created. 
authors: - schlund_manuel diff --git a/esmvaltool/recipes/recipe_shapeselect.yml b/esmvaltool/recipes/recipe_shapeselect.yml index f37a302dff..0fb22c0d5d 100644 --- a/esmvaltool/recipes/recipe_shapeselect.yml +++ b/esmvaltool/recipes/recipe_shapeselect.yml @@ -11,7 +11,7 @@ documentation: - berg_peter maintainer: - - unmaintained + - ruhe_lukas projects: - c3s-magic diff --git a/esmvaltool/recipes/recipe_smpi_4cds.yml b/esmvaltool/recipes/recipe_smpi_4cds.yml index 5f946986b4..3a60419d69 100644 --- a/esmvaltool/recipes/recipe_smpi_4cds.yml +++ b/esmvaltool/recipes/recipe_smpi_4cds.yml @@ -236,9 +236,9 @@ diagnostics: pr: <<: *variable_settings preprocessor: ppNOLEV - reference_dataset: GPCP-SG + reference_dataset: GPCP-V2.2 additional_datasets: - - {dataset: GPCP-SG, project: obs4MIPs, level: L3, version: v2.2, tier: 1} + - {dataset: GPCP-V2.2, project: obs4MIPs, level: L3, tier: 1} scripts: grading: script: perfmetrics/main.ncl diff --git a/esmvaltool/recipes/recipe_wenzel14jgr.yml b/esmvaltool/recipes/recipe_wenzel14jgr.yml index cd66bfc64c..814da02a18 100644 --- a/esmvaltool/recipes/recipe_wenzel14jgr.yml +++ b/esmvaltool/recipes/recipe_wenzel14jgr.yml @@ -26,14 +26,14 @@ preprocessors: area_type: land exclude: [ 'GCP2018', - ] + ] sea_fraction_weighting: weighting_landsea_fraction: area_type: sea exclude: [ 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', - ] + ] regrid: target_grid: CanESM2 scheme: area_weighted @@ -43,30 +43,30 @@ preprocessors: area_type: land exclude: [ 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', - ] + ] area_statistics: - operator: sum + operator: sum tropical_land: weighting_landsea_fraction: area_type: land exclude: [ 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', - ] + ] extract_region: start_longitude: 0. end_longitude: 360. start_latitude: -30. 
end_latitude: 30. area_statistics: - operator: sum + operator: sum global_ocean: weighting_landsea_fraction: area_type: sea exclude: [ 'GCP2018', 'HadGEM2-ES', 'CanESM2', 'IPSL-CM5A-LR', 'GFDL-ESM2M', 'MPI-ESM-LR', 'CESM1-BGC', 'NorESM1-ME', - ] + ] area_statistics: operator: sum @@ -77,7 +77,7 @@ preprocessors: start_latitude: -30. end_latitude: 30. area_statistics: - operator: mean + operator: mean diagnostics: diff --git a/esmvaltool/references/fox-kemper21ipcc.bibtex b/esmvaltool/references/fox-kemper21ipcc.bibtex new file mode 100644 index 0000000000..e26a480f46 --- /dev/null +++ b/esmvaltool/references/fox-kemper21ipcc.bibtex @@ -0,0 +1,11 @@ +@inbook{fox-kemper21ipcc, + author={B. Fox-Kemper and H.T. Hewitt and C. Xiao and G. Aðalgeirsdóttir and S.S. Drijfhout and T.L. Edwards and N.R. Golledge and M. Hemer and R.E. Kopp and G. Krinner and A. Mix and D. Notz and S. Nowicki and I.S. Nurhati and L. Ruiz and J.-B. Sallée and A.B.A. Slangen and Y. Yu}, + editor={V. Masson-Delmotte and P. Zhai and A. Pirani and S.L. Connors and C. Pean and S. Berger and N. Caud and Y. Chen and L. Goldfarb and M.I. Gomis and M. Huang and K. Leitzell and E. Lonnoy and J.B.R. Matthews and T.K. Maycock and T. Waterfield and O. Yelekci and R. Yu and B. Zhou}, + title={Ocean, Cryosphere and Sea Level Change}, + booktitle={Climate Change 2021: The Physical Science Basis. 
Contribution of Working Group I to the Sixth Assessment Report of the Intergovernmental Panel on Climate Change}, + year={2021}, + publisher={Cambridge University Press}, + address={Cambridge, UK and New York, NY, USA}, + pages = {1211--1362}, + doi={10.1017/9781009157896.011} +} diff --git a/esmvaltool/references/merra.bibtex b/esmvaltool/references/merra.bibtex new file mode 100644 index 0000000000..aa1df2c53d --- /dev/null +++ b/esmvaltool/references/merra.bibtex @@ -0,0 +1,13 @@ +@article{merra, + doi = {10.1175/JCLI-D-11-00015.1}, + url = {https://journals.ametsoc.org/view/journals/clim/24/14/jcli-d-11-00015.1.xml}, + year = 2011, + month = {jul}, + publisher = {American Meteorological Society}, + volume = {24}, + number = {14}, + pages = {3624--3648}, + author = {Michele M. Rienecker and Max J. Suarez and Ronald Gelaro and Ricardo Todling and Julio Bacmeister and Emily Liu and Michael G. Bosilovich and Siegfried D. Schubert and Lawrence Takacs and Gi-Kong Kim and Stephen Bloom and Junye Chen and Douglas Collins and Austin Conaty and Arlindo da Silva and Wei Gu and Joanna Joiner and Randal D. Koster and Robert Lucchesi and Andrea Molod and Tommy Owens and Steven Pawson and Philip Pegion and Christopher R. Redder and Rolf Reichle and Franklin R. Robertson and Albert G. Ruddick and Meta Sienkiewicz and Jack Woollen}, + title = {{MERRA}: {NASA}’s Modern-Era Retrospective Analysis for Research and Applications}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/references/mobo_dic2004_2019.bibtex b/esmvaltool/references/mobo_dic2004_2019.bibtex new file mode 100644 index 0000000000..928fc33afd --- /dev/null +++ b/esmvaltool/references/mobo_dic2004_2019.bibtex @@ -0,0 +1,12 @@ +@article{mobo_dic2004_2019, + doi = {10.1029/2022gb007677}, + url = {https://doi.org/10.1029/2022gb007677}, + year = {2023}, + month = may, + publisher = {American Geophysical Union ({AGU})}, + volume = {37}, + number = {5}, + author = {L. Keppler and P. Landschützer and S. K. 
Lauvset and N. Gruber}, + title = {Recent Trends and Variability in the Oceanic Storage of Dissolved Inorganic Carbon}, + journal = {Global Biogeochemical Cycles} +} diff --git a/esmvaltool/references/noaa-cires-20cr.bibtex b/esmvaltool/references/noaa-cires-20cr-v2.bibtex similarity index 62% rename from esmvaltool/references/noaa-cires-20cr.bibtex rename to esmvaltool/references/noaa-cires-20cr-v2.bibtex index 54d67ea7f3..79314b1d4b 100644 --- a/esmvaltool/references/noaa-cires-20cr.bibtex +++ b/esmvaltool/references/noaa-cires-20cr-v2.bibtex @@ -1,9 +1,11 @@ -@article{noaa-cire-20cr, +@article{noaa-cires-20cr-v2, doi = {10.1002/qj.776}, url = {https://doi.org/10.1002/qj.776}, - publisher = {Royal Meteorological Society}, - author = {Compo, G.P., Whitaker, J.S., Sardeshmukh, P.D., Matsui, N., Allan, R.J., Yin, X., Gleason, B.E., Vose, R.S., Rutledge, G., Bessemoulin, P., Brönnimann, S., Brunet, M., Crouthamel, R.I., Grant, A.N., Groisman, P.Y., Jones, P.D., Kruk, M.C., Kruger, A.C., Marshall, G.J., Maugeri, M., Mok, H.Y., Nordli, Ø., Ross, T.F., Trigo, R.M., Wang, X.L., Woodruff, S.D. and Worley, S.J.}, + author = {Compo, G.P., Whitaker, J.S., Sardeshmukh, P.D., Matsui, N., Allan, R.J., Yin, X., Gleason, B.E., Vose, R.S., Rutledge, G., Bessemoulin, P., Brönnimann, S., Brunet, M., Crouthamel, R.I., Grant, A.N., Groisman, P.Y., Jones, P.D., Kruk, M.C., Kruger, A.C., Marshall, G.J., Maugeri, M., Mok, H.Y., Nordli, O., Ross, T.F., Trigo, R.M., Wang, X.L., Woodruff, S.D. and Worley, S.J.}, title = {The Twentieth Century Reanalysis Project}, - year = {2011} + year = {2011}, - journal = {Quarterly J. Roy. Meteorol. Soc.} + journal = {Quarterly J. Roy. Meteorol. Soc.}, + volume = {137}, + number = {654}, + pages = {1-28}, } diff --git a/esmvaltool/references/noaa-cires-20cr-v3.bibtex b/esmvaltool/references/noaa-cires-20cr-v3.bibtex new file mode 100644 index 0000000000..d1bf482e9d --- /dev/null +++ b/esmvaltool/references/noaa-cires-20cr-v3.bibtex @@ -0,0 +1,11 @@ +@article{noaa-cires-20cr-v3, + journal = {Quarterly J.
Roy. Meteorol. Soc.}, + author = {Slivinski, Laura C. and Compo, Gilbert P. and Whitaker, Jeffrey S. and Sardeshmukh, Prashant D. and Giese, Benjamin S. and McColl, Chesley and Allan, Rob and Yin, Xungang and Vose, Russell and Titchner, Holly and Kennedy, John and Spencer, Lawrence J. and Ashcroft, Linden and Brönnimann, Stefan and Brunet, Manola and Camuffo, Dario and Cornes, Richard and Cram, Thomas A. and Crouthamel, Richard and Domínguez-Castro, Fernando and Freeman, J. Eric and Gergis, Joëlle and Hawkins, Ed and Jones, Philip D. and Jourdain, Sylvie and Kaplan, Alexey and Kubota, Hisayuki and Blancq, Frank Le and Lee, Tsz-Cheung and Lorrey, Andrew and Luterbacher, Jürg and Maugeri, Maurizio and Mock, Cary J. and Moore, G.W. Kent and Przybylak, Rajmund and Pudmenzky, Christa and Reason, Chris and Slonosky, Victoria C. and Smith, Catherine A. and Tinz, Birger and Trewin, Blair and Valente, Maria Antónia and Wang, Xiaolan L. and Wilkinson, Clive and Wood, Kevin and Wyszynski, Przemysław}, + title = {Towards a more reliable historical reanalysis: Improvements for version 3 of the Twentieth Century Reanalysis system}, + volume = {145}, + number = {724}, + pages = {2876-2908}, + doi = {https://doi.org/10.1002/qj.3598}, + url = {https://rmets.onlinelibrary.wiley.com/doi/abs/10.1002/qj.3598}, + year = {2019} +} diff --git a/esmvaltool/references/noaa-mbl-ch4.bibtex b/esmvaltool/references/noaa-mbl-ch4.bibtex new file mode 100644 index 0000000000..e4c6a17cbd --- /dev/null +++ b/esmvaltool/references/noaa-mbl-ch4.bibtex @@ -0,0 +1,9 @@ +@misc{noaa-mbl-ch4, + doi = {https://doi.org/10.15138/P8XG-AA10}, + url = {https://gml.noaa.gov/ccgg/trends_ch4/}, + year = 2023, + month = {jul}, + author = {Lan, X., K.W. Thoning, and E.J. Dlugokencky}, + title = {Trends in globally-averaged CH4, N2O, and SF6 determined from NOAA Global Monitoring Laboratory measurements.
Version 2023-09}, + howpublished = {via website https://gml.noaa.gov/ccgg/trends_ch4/, provided by the NOAA/GML, Boulder, Colorado, USA} +} diff --git a/esmvaltool/references/pearce23jclim.bibtex b/esmvaltool/references/pearce23jclim.bibtex new file mode 100644 index 0000000000..ebfac532ad --- /dev/null +++ b/esmvaltool/references/pearce23jclim.bibtex @@ -0,0 +1,13 @@ +@article{pearce23jclim, + doi = {10.1175/JCLI-D-22-0149.1}, + url = {https://doi.org/10.1175/JCLI-D-22-0149.1}, + year = 2023, + month = {jun}, + publisher = {American Meteorological Society}, + volume = {36}, + number = {9}, + pages = {1--30}, + author = {Francesca Pearce and Alejandro Bodas-Salcedo}, + title = {Implied heat transport from {CERES} data: direct radiative effect of clouds on regional patterns and hemispheric symmetry}, + journal = {Journal of Climate} +} diff --git a/esmvaltool/utils/batch-jobs/generate.py b/esmvaltool/utils/batch-jobs/generate.py index dd7a46d83f..fc9deb8339 100644 --- a/esmvaltool/utils/batch-jobs/generate.py +++ b/esmvaltool/utils/batch-jobs/generate.py @@ -51,6 +51,9 @@ # Full path to config_file # If none, ~/.esmvaltool/config-user.yml is used config_file = '' +# Set max_parallel_tasks +# If none, read from config_file +default_max_parallel_tasks = 8 # List of recipes that require non-default SLURM options set above SPECIAL_RECIPES = { @@ -74,12 +77,17 @@ }, 'recipe_climate_change_hotspot': { 'partition': '#SBATCH --partition=compute \n', + 'memory': '#SBATCH --constraint=512G \n', }, 'recipe_collins13ipcc': { 'partition': '#SBATCH --partition=compute \n', 'time': '#SBATCH --time=08:00:00 \n', 'memory': '#SBATCH --constraint=512G \n', }, + 'recipe_daily_era5': { + 'partition': '#SBATCH --partition=compute \n', + 'memory': '#SBATCH --constraint=512G \n', + }, 'recipe_eady_growth_rate': { 'partition': '#SBATCH --partition=compute \n', }, @@ -125,6 +133,7 @@ }, 'recipe_ipccwg1ar6ch3_fig_3_43': { 'partition': '#SBATCH --partition=compute \n', + 'time': '#SBATCH 
--time=08:00:00 \n', }, 'recipe_lauer22jclim_fig3-4_zonal': { 'partition': '#SBATCH --partition=compute \n', @@ -137,6 +146,7 @@ }, 'recipe_mpqb_xch4': { 'partition': '#SBATCH --partition=compute \n', + 'memory': '#SBATCH --constraint=512G \n', }, 'recipe_perfmetrics_CMIP5': { 'partition': '#SBATCH --partition=compute \n', @@ -164,6 +174,9 @@ 'recipe_schlund20jgr_gpp_change_rcp85': { 'partition': '#SBATCH --partition=compute \n', }, + 'recipe_sea_surface_salinity': { + 'partition': '#SBATCH --partition=compute \n', + }, 'recipe_smpi': { 'partition': '#SBATCH --partition=compute \n', }, @@ -174,6 +187,9 @@ 'partition': '#SBATCH --partition=compute \n', 'time': '#SBATCH --time=08:00:00 \n', }, + 'recipe_thermodyn_diagtool': { + 'partition': '#SBATCH --partition=compute \n', + }, 'recipe_wenzel16jclim': { 'partition': '#SBATCH --partition=compute \n', }, @@ -182,26 +198,28 @@ }, } +# These recipes cannot be run with the default number of parallel +# tasks (max_parallel_tasks=8). # These recipes either use CMIP3 input data # (see https://github.com/ESMValGroup/ESMValCore/issues/430) -# and recipes where tasks require the full compute node memory. -ONE_TASK_RECIPES = [ - 'recipe_bock20jgr_fig_1-4', - 'recipe_bock20jgr_fig_6-7', - 'recipe_bock20jgr_fig_8-10', - 'recipe_flato13ipcc_figure_96', - 'recipe_flato13ipcc_figures_938_941_cmip3', - 'recipe_ipccwg1ar6ch3_fig_3_9', - 'recipe_ipccwg1ar6ch3_fig_3_42_a', - 'recipe_ipccwg1ar6ch3_fig_3_43', - 'recipe_check_obs', - 'recipe_collins13ipcc', - 'recipe_lauer22jclim_fig3-4_zonal', - 'recipe_lauer22jclim_fig5_lifrac', - 'recipe_smpi', - 'recipe_smpi_4cds', - 'recipe_wenzel14jgr', - ] +# or require a large fraction of the compute node memory. 
+MAX_PARALLEL_TASKS = { + 'recipe_bock20jgr_fig_1-4': 1, + 'recipe_bock20jgr_fig_6-7': 1, + 'recipe_bock20jgr_fig_8-10': 1, + 'recipe_flato13ipcc_figure_96': 1, + 'recipe_flato13ipcc_figures_938_941_cmip3': 1, + 'recipe_ipccwg1ar6ch3_fig_3_9': 1, + 'recipe_ipccwg1ar6ch3_fig_3_42_a': 1, + 'recipe_ipccwg1ar6ch3_fig_3_43': 1, + 'recipe_check_obs': 1, + 'recipe_collins13ipcc': 1, + 'recipe_lauer22jclim_fig3-4_zonal': 3, + 'recipe_lauer22jclim_fig5_lifrac': 3, + 'recipe_smpi': 1, + 'recipe_smpi_4cds': 1, + 'recipe_wenzel14jgr': 1, +} def generate_submit(): @@ -272,9 +290,12 @@ def generate_submit(): else: file.write(f'esmvaltool run --config_file ' f'{str(config_file)} {str(recipe)}') - if recipe.stem in ONE_TASK_RECIPES: - file.write(' --max_parallel_tasks=1') - + # set max_parallel_tasks + max_parallel_tasks = MAX_PARALLEL_TASKS.get( + recipe.stem, + default_max_parallel_tasks, + ) + file.write(f' --max_parallel_tasks={max_parallel_tasks}\n') if submit: subprocess.check_call(['sbatch', filename]) diff --git a/esmvaltool/utils/batch-jobs/parse_recipes_output.py b/esmvaltool/utils/batch-jobs/parse_recipes_output.py index a8436d3c31..9df80c6cc0 100644 --- a/esmvaltool/utils/batch-jobs/parse_recipes_output.py +++ b/esmvaltool/utils/batch-jobs/parse_recipes_output.py @@ -7,10 +7,8 @@ a GitHub issue or any other such documentation. 
""" import datetime -import os - import glob - +import os # User change needed # directory where SLURM output files (.out and .err) are @@ -81,11 +79,13 @@ def display_in_md(): todaynow = datetime.datetime.now() print(f"## Recipe running session {todaynow}\n") with open("all_recipes.txt", "r", encoding='utf-8') as allrecs: - all_recs = [rec.strip() for rec in allrecs.readlines()] + all_recs = [ + os.path.basename(rec.strip()) for rec in allrecs.readlines() + ] # parse different types of recipe outcomes recipe_list, failed, missing_dat = parse_output_file() - print("### Succesfully run recipes\n\n") + print("### Successfully ran recipes\n\n") print(f"{len(recipe_list)} out of {len(all_recs)}\n") for rec in recipe_list: print("- " + rec) diff --git a/esmvaltool/utils/draft_release_notes.py b/esmvaltool/utils/draft_release_notes.py index d00e752455..13bcb4324c 100644 --- a/esmvaltool/utils/draft_release_notes.py +++ b/esmvaltool/utils/draft_release_notes.py @@ -10,6 +10,7 @@ """ import datetime from pathlib import Path +from zoneinfo import ZoneInfo import dateutil import esmvalcore @@ -41,23 +42,24 @@ } PREVIOUS_RELEASE = { - 'esmvalcore': datetime.datetime(2022, 10, 13, 18), - 'esmvaltool': datetime.datetime(2022, 10, 28, 18), + 'esmvalcore': + datetime.datetime(2023, 7, 4, 11, tzinfo=ZoneInfo("CET")), + 'esmvaltool': + datetime.datetime(2022, 10, 28, 18, tzinfo=ZoneInfo("CET")), } - LABELS = { 'esmvalcore': ( + 'backwards incompatible change', # important, keep at the top + 'deprecated feature', # important, keep at the top + 'bug', # important, keep at the top 'api', - 'backwards incompatible change', - 'bug', 'cmor', 'containerization', 'community', + 'dask', 'deployment', - 'deprecated feature', 'documentation', - 'enhancement', 'fix for dataset', 'installation', 'iris', @@ -65,20 +67,21 @@ 'release', 'testing', 'UX', - 'variable derivation' + 'variable derivation', + 'enhancement', # uncategorized, keep at the bottom ), 'esmvaltool': ( - 'backwards incompatible 
change', - 'bug', + 'backwards incompatible change', # important, keep at the top + 'deprecated feature', # important, keep at the top + 'bug', # important, keep at the top 'community', - 'deprecated feature', 'documentation', 'diagnostic', 'preprocessor', 'observations', 'testing', 'installation', - 'enhancement', + 'enhancement', # uncategorized, keep at the bottom ) } @@ -87,6 +90,7 @@ 'deprecated feature': 'Deprecations', 'bug': 'Bug fixes', 'cmor': 'CMOR standard', + 'dask': 'Computational performance improvements', 'diagnostic': 'Diagnostics', 'fix for dataset': 'Fixes for datasets', 'observations': 'Observational and re-analysis dataset support', @@ -210,7 +214,8 @@ def _compose_note(pull): def main(): - """Entry point for the scrip.""" + """Entry point for the script.""" + def display(lines, out): text = "\n".join(lines) + "\n" out.write(text) diff --git a/esmvaltool/utils/recipe_filler.py b/esmvaltool/utils/recipe_filler.py index 90001674b1..40f637c6d5 100755 --- a/esmvaltool/utils/recipe_filler.py +++ b/esmvaltool/utils/recipe_filler.py @@ -63,15 +63,6 @@ CFG = {} -# standard libs from esmvalcore ported here to avoid private func import -def load_config_developer(cfg_file=None): - """Load the config developer file and initialize CMOR tables.""" - cfg_developer = read_config_developer_file(cfg_file) - for key, value in cfg_developer.items(): - CFG[key] = value - read_cmor_tables(CFG) - - def _purge_file_handlers(cfg: dict) -> None: """Remove handlers with filename set. 
@@ -258,7 +249,7 @@ def read_config_user_file(config_file, folder_name, options=None): cfg['run_dir'] = os.path.join(cfg['output_dir'], 'run') # Read developer configuration file - load_config_developer(cfg['config_developer_file']) + read_cmor_tables(cfg['config_developer_file']) return cfg @@ -300,16 +291,31 @@ def read_config_user_file(config_file, folder_name, options=None): } +def _get_download_dir(yamlconf, cmip_era): + """Get the Download Directory from user config file.""" + if 'download_dir' in yamlconf: + return os.path.join(yamlconf['download_dir'], cmip_era) + return False + + def _get_site_rootpath(cmip_era): """Get site (drs) from config-user.yml.""" config_yml = get_args().config_file with open(config_yml, 'r') as yamf: yamlconf = yaml.safe_load(yamf) drs = yamlconf['drs'][cmip_era] - rootdir = yamlconf['rootpath'][cmip_era] + + download_dir = _get_download_dir(yamlconf, cmip_era) + rootdir = [yamlconf['rootpath'][cmip_era], ] + + if download_dir: + rootdir.append(download_dir) logger.debug("%s root directory %s", cmip_era, rootdir) if drs == 'default' and 'default' in yamlconf['rootpath']: - rootdir = yamlconf['rootpath']['default'] + rootdir = [yamlconf['rootpath']['default'], ] + if download_dir: + rootdir.append(download_dir) + logger.debug("Using drs default and " "default: %s data directory", rootdir) @@ -336,6 +342,7 @@ def _determine_basepath(cmip_era): rootpaths = _get_site_rootpath(cmip_era)[1] else: rootpaths = [_get_site_rootpath(cmip_era)[1]] + basepaths = [] for rootpath in rootpaths: if _get_input_dir(cmip_era) != os.path.sep: diff --git a/esmvaltool/utils/testing/regression/compare.py b/esmvaltool/utils/testing/regression/compare.py index ca96b65a68..a4ee33c5d3 100644 --- a/esmvaltool/utils/testing/regression/compare.py +++ b/esmvaltool/utils/testing/regression/compare.py @@ -80,7 +80,7 @@ def diff_attrs(ref: dict, cur: dict) -> str: msg.append(f"missing attribute '{key}'") elif not np.array_equal(ref[key], cur[key]): 
msg.append(f"value of attribute '{key}' is different: " - f"expected '{cur[key]}' but found '{ref[key]}'") + f"expected '{ref[key]}' but found '{cur[key]}'") for key in cur: if key not in ref: msg.append(f"extra attribute '{key}' with value '{cur[key]}'") @@ -96,12 +96,15 @@ def diff_array(ref: np.ndarray, cur: np.ndarray) -> str: msg = [] if cur.shape != ref.shape: msg.append("data has different shape") + elif np.issubdtype(ref.dtype, np.inexact) and np.issubdtype( + cur.dtype, np.inexact): + if not np.array_equal(ref, cur, equal_nan=True): + if np.allclose(ref, cur, equal_nan=True): + msg.append("data is almost but not quite the same") + else: + msg.append("data is different") elif not np.array_equal(ref, cur): - if np.issubdtype(ref.dtype, np.inexact) and np.issubdtype( - cur.dtype, np.inexact) and np.allclose(ref, cur): - msg.append("data is almost but not quite the same") - else: - msg.append("data is different") + msg.append("data is different") return as_txt(msg) diff --git a/setup.py b/setup.py index 497815cf7e..bd584b7b3b 100755 --- a/setup.py +++ b/setup.py @@ -44,6 +44,7 @@ 'natsort', 'nc-time-axis', 'netCDF4', + 'numba', 'numpy!=1.24.3', # severe masking bug 'packaging', 'openpyxl', @@ -60,7 +61,9 @@ 'scikit-image', 'scikit-learn', 'scipy', - 'scitools-iris>=3.6.0', + # See the following issue for info on the iris pin below: + # https://github.com/ESMValGroup/ESMValTool/issues/3239#issuecomment-1613298587 + 'scitools-iris>=3.4.0', 'seaborn', 'seawater', 'shapely', diff --git a/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py b/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py index f14ccb3e2e..b058465188 100644 --- a/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py +++ b/tests/integration/diag_scripts/mlr/test_custom_sklearn_functions.py @@ -459,10 +459,18 @@ def _more_tags(self): return {"allow_nan": True} +class MockBaseEstimator: + """Estimator with ``_get_tags``.""" + + def _get_tags(self): + 
"""Return tags.""" + return _DEFAULT_TAGS + + @pytest.mark.parametrize( 'estimator,err_msg', [ - (BaseEstimator(), 'The key xxx is not defined in _get_tags'), + (MockBaseEstimator(), 'The key xxx is not defined in _get_tags'), (NoTagsEstimator(), 'The key xxx is not defined in _DEFAULT_TAGS'), ], ) @@ -480,9 +488,8 @@ def test_safe_tags_error(estimator, err_msg): (NoTagsEstimator(), 'allow_nan', _DEFAULT_TAGS['allow_nan']), (MoreTagsEstimator(), None, {**_DEFAULT_TAGS, **{'allow_nan': True}}), (MoreTagsEstimator(), 'allow_nan', True), - (BaseEstimator(), None, _DEFAULT_TAGS), - (BaseEstimator(), 'allow_nan', _DEFAULT_TAGS['allow_nan']), - (BaseEstimator(), 'allow_nan', _DEFAULT_TAGS['allow_nan']), + (MockBaseEstimator(), None, _DEFAULT_TAGS), + (MockBaseEstimator(), 'allow_nan', _DEFAULT_TAGS['allow_nan']), ], ) def test_safe_tags_no_get_tags(estimator, key, expected_results): diff --git a/tests/integration/test_recipe_filler.py b/tests/integration/test_recipe_filler.py index a0f3f85333..b78ac8c5f8 100644 --- a/tests/integration/test_recipe_filler.py +++ b/tests/integration/test_recipe_filler.py @@ -8,14 +8,8 @@ import pytest import yaml -import esmvaltool.utils.recipe_filler as recipe_filler -from esmvalcore.cmor.table import read_cmor_tables from esmvaltool.utils.recipe_filler import run -# Initialize with standard config developer file -std_config = recipe_filler.read_config_developer_file() -# Initialize CMOR tables -read_cmor_tables(std_config) # Load test configuration with open(os.path.join(os.path.dirname(__file__),